428 if update_fields:
429 cursor.execute(f'''
430 UPDATE artefacts
431 SET {', '.join(update_fields)}
432 WHERE id = ?
433 ''', params)
434
10 import sys 11 import subprocess 12 import datetime
44 print("Running Bandit scan...")
45 result = subprocess.run(cmd, capture_output=True, text=True)
46
52 ] 53 subprocess.run(html_cmd, capture_output=True) 54
118 ]
119 subprocess.run(cmd, capture_output=True)
120 print(f" Analyzed: {py_file}")
179 cmd = ['safety', 'check', '--output', 'text'] 180 result = subprocess.run(cmd, capture_output=True, text=True) 181
117
118 assert result == True
119 print("✓ Artefact creation with valid file works")
129
130 assert result == False
131 print("✓ Artefact creation correctly fails with non-existent file")
143
144 assert result == False
145 print("✓ Artefact creation correctly fails with invalid type")
162
163 assert result == False
164 print("✓ Artefact creation correctly restricted to creators/admins")
175 176 assert isinstance(artefacts, list) 177 assert len(artefacts) >= 2 # Should see at least the artefacts we created
176 assert isinstance(artefacts, list) 177 assert len(artefacts) >= 2 # Should see at least the artefacts we created 178
180 for artefact in artefacts: 181 assert len(artefact) == 5 # id, title, type, created, updated 182 assert isinstance(artefact[0], int) # id
181 assert len(artefact) == 5 # id, title, type, created, updated 182 assert isinstance(artefact[0], int) # id 183 assert isinstance(artefact[1], str) # title
182 assert isinstance(artefact[0], int) # id 183 assert isinstance(artefact[1], str) # title 184 assert artefact[2] in ['lyric', 'score', 'recording'] # type
183 assert isinstance(artefact[1], str) # title 184 assert artefact[2] in ['lyric', 'score', 'recording'] # type 185
195 artefacts = self.artefact_manager.list_artefacts(user_only=True) 196 assert len(artefacts) > 0 197 artefact_id = artefacts[0][0]
202 203 assert result == True 204 assert os.path.exists(output_path)
203 assert result == True 204 assert os.path.exists(output_path) 205
212
213 assert original_content == downloaded_content
214 print("✓ Artefact download and content verification works")
220
221 assert result == False
222 print("✓ Artefact download correctly fails with non-existent ID")
240
241 assert result == True
242 print("✓ Artefact metadata update works correctly")
257
258 assert result == True
259 print("✓ Artefact file replacement update works correctly")
278
279 assert result == False
280 print("✓ Artefact update correctly restricted to owners/admins")
294 295 assert result == True 296
298 artefacts_after = self.artefact_manager.list_artefacts(user_only=True) 299 assert len(artefacts_after) == len(artefacts) - 1 300
315
316 assert result == True
317 print("✓ Artefact integrity verification works correctly")
331 ) 332 assert result == True 333
338 for artefact_type in artefact_types: 339 assert artefact_type in created_types 340
39 # Verify checksum properties 40 assert len(checksum) == 64 # SHA-256 produces 64 char hex string 41 assert isinstance(checksum, str)
40 assert len(checksum) == 64 # SHA-256 produces 64 char hex string 41 assert isinstance(checksum, str) 42 assert all(c in '0123456789abcdef' for c in checksum) # Valid hex
41 assert isinstance(checksum, str) 42 assert all(c in '0123456789abcdef' for c in checksum) # Valid hex 43
65 # Same content should produce same checksum
66 assert checksum1 == checksum2
67 print("✓ Checksum consistency verified")
92 # Different content should produce different checksums
93 assert checksum1 != checksum2
94 print("✓ Different content produces different checksums")
114 # Verify encrypted data is different from original 115 assert encrypted_data != self.test_data 116 assert os.path.exists(storage_path)
115 assert encrypted_data != self.test_data 116 assert os.path.exists(storage_path) 117
119 decrypted_data = self.security.decrypt_file(storage_path, data_key) 120 assert decrypted_data == self.test_data 121
138 # Verify encrypted key is different 139 assert encrypted_key != original_data_key 140
144 # Verify we get the original key back 145 assert original_data_key == decrypted_key 146
159 # Verify checksum matches
160 assert self.security.verify_checksum(temp_path, checksum) == True
161 print("✓ Checksum verification success case works")
181 # Verify checksum fails
182 assert self.security.verify_checksum(temp_path, original_checksum) == False
183 print("✓ Checksum verification failure case works")
194 # Generated keys should be different 195 assert key1 != key2 196 assert len(key1) == len(key2) # But same length
195 assert key1 != key2
196 assert len(key1) == len(key2) # But same length
197 print("✓ Unique data key generation verified")
203 # Verify timestamp is not None and has expected attributes 204 assert timestamp is not None 205 assert hasattr(timestamp, 'year')
204 assert timestamp is not None 205 assert hasattr(timestamp, 'year') 206 assert hasattr(timestamp, 'month')
205 assert hasattr(timestamp, 'year') 206 assert hasattr(timestamp, 'month') 207 assert hasattr(timestamp, 'day')
206 assert hasattr(timestamp, 'month') 207 assert hasattr(timestamp, 'day') 208
33
34 assert result == True
35 print("✓ User registration with valid data works")
40 result1 = self.user_manager.register_user("testuser", "password123", "viewer")
41 assert result1 == True
42
44 result2 = self.user_manager.register_user("testuser", "differentpassword", "creator")
45 assert result2 == False
46
52
53 assert result == False
54 print("✓ Invalid role registration correctly rejected")
63 64 assert result == True 65 assert self.user_manager.current_user is not None
64 assert result == True 65 assert self.user_manager.current_user is not None 66 assert self.user_manager.current_user.username == "testuser"
65 assert self.user_manager.current_user is not None 66 assert self.user_manager.current_user.username == "testuser" 67 assert self.user_manager.current_user.role == "viewer"
66 assert self.user_manager.current_user.username == "testuser" 67 assert self.user_manager.current_user.role == "viewer" 68
78 79 assert result == False 80 assert self.user_manager.current_user is None
79 assert result == False 80 assert self.user_manager.current_user is None 81
87 88 assert result == False 89 assert self.user_manager.current_user is None
88 assert result == False 89 assert self.user_manager.current_user is None 90
99 # Verify user is logged in 100 assert self.user_manager.current_user is not None 101
105 # Verify user is logged out 106 assert self.user_manager.current_user is None 107
112 # Test without any user logged in
113 assert self.user_manager.has_permission('viewer') == False
114
119 # Viewer should have viewer permissions but not creator/admin
120 assert self.user_manager.has_permission('viewer') == True
121 assert self.user_manager.has_permission('creator') == False
120 assert self.user_manager.has_permission('viewer') == True
121 assert self.user_manager.has_permission('creator') == False
122 assert self.user_manager.has_permission('admin') == False
121 assert self.user_manager.has_permission('creator') == False
122 assert self.user_manager.has_permission('admin') == False
123
129 # Creator should have viewer and creator permissions but not admin
130 assert self.user_manager.has_permission('viewer') == True
131 assert self.user_manager.has_permission('creator') == True
130 assert self.user_manager.has_permission('viewer') == True
131 assert self.user_manager.has_permission('creator') == True
132 assert self.user_manager.has_permission('admin') == False
131 assert self.user_manager.has_permission('creator') == True
132 assert self.user_manager.has_permission('admin') == False
133
139 # Admin should have all permissions
140 assert self.user_manager.has_permission('viewer') == True
141 assert self.user_manager.has_permission('creator') == True
140 assert self.user_manager.has_permission('viewer') == True
141 assert self.user_manager.has_permission('creator') == True
142 assert self.user_manager.has_permission('admin') == True
141 assert self.user_manager.has_permission('creator') == True
142 assert self.user_manager.has_permission('admin') == True
143
152 users = self.user_manager.list_users() 153 assert users == [] # Should return empty list for non-admin 154
160 users = self.user_manager.list_users() 161 assert isinstance(users, list) 162 # Should see at least the admin user itself
162 # Should see at least the admin user itself 163 assert len(users) >= 1 164
174 # Both should be able to login with the same password
175 assert self.user_manager.login("user1", "mypassword") == True
176 self.user_manager.logout()
176 self.user_manager.logout()
177 assert self.user_manager.login("user2", "mypassword") == True
178
63 core = importlib.import_module('distutils.core')
64 assert '_distutils' in core.__file__, core.__file__
65 assert 'setuptools._distutils.log' not in sys.modules
64 assert '_distutils' in core.__file__, core.__file__ 65 assert 'setuptools._distutils.log' not in sys.modules 66
171 f_locals.update(vars) 172 return eval(code, self.f_globals, f_locals) 173
229 source = self.frame.code.fullsource 230 assert source is not None 231 return source.getstatement(self.lineno)
292 tbh = maybe_ns_dct["__tracebackhide__"] 293 except Exception: 294 pass 295 else:
491 """ 492 assert ( 493 exception.__traceback__ 494 ), "Exceptions passed to ExcInfo.from_exception(...) must have a non-None __traceback__." 495 exc_info = (type(exception), exception, exception.__traceback__)
530 tup = sys.exc_info() 531 assert tup[0] is not None, "no current exception" 532 assert tup[1] is not None, "no current exception"
531 assert tup[0] is not None, "no current exception" 532 assert tup[1] is not None, "no current exception" 533 assert tup[2] is not None, "no current exception"
532 assert tup[1] is not None, "no current exception" 533 assert tup[2] is not None, "no current exception" 534 exc_info = (tup[0], tup[1], tup[2])
543 """Fill an unfilled ExceptionInfo created with ``for_later()``.""" 544 assert self._excinfo is None, "ExceptionInfo was already filled" 545 self._excinfo = exc_info
549 """The exception class.""" 550 assert ( 551 self._excinfo is not None 552 ), ".type can only be used after the context manager exits" 553 return self._excinfo[0]
557 """The exception value.""" 558 assert ( 559 self._excinfo is not None 560 ), ".value can only be used after the context manager exits" 561 return self._excinfo[1]
565 """The exception raw traceback.""" 566 assert ( 567 self._excinfo is not None 568 ), ".tb can only be used after the context manager exits" 569 return self._excinfo[2]
573 """The type name of the exception.""" 574 assert ( 575 self._excinfo is not None 576 ), ".typename can only be used after the context manager exits" 577 return self.type.__name__
713 msg += "\n Did you mean to `re.escape()` the regex?" 714 assert re.search(regexp, value), msg 715 # Return True to allow for "assert excinfo.match()".
206 end = block_finder.last + start 207 except Exception: 208 pass 209
74 file = colorama.AnsiToWin32(file).stream 75 assert file is not None 76 self._file = file
398 try: 399 import pickle 400
841 target = target.join(self.basename) 842 assert self != target 843 copychunked(self, target)
876 f = self.open("wb")
877 import pickle
878
1125 modfile = mod.__file__
1126 assert modfile is not None
1127 if modfile[-4:] in (".pyc", ".pyo"):
1154 with open(str(self), "rb") as f: 1155 exec(f.read(), mod.__dict__) 1156 except BaseException:
1165 """ 1166 from subprocess import Popen, PIPE 1167
1169 popen_opts.pop("stderr", None)
1170 proc = Popen(
1171 [str(self)] + [str(arg) for arg in argv],
1172 **popen_opts,
1173 stdout=PIPE,
1174 stderr=PIPE,
1175 )
1176 stdout: str | bytes
1403 raise 1404 except Exception: # this might be error.Error, WindowsError ... 1405 pass 1406 if is_garbage(path):
1410 raise 1411 except Exception: # this might be error.Error, WindowsError ... 1412 pass 1413
146 def exec_module(self, module: types.ModuleType) -> None: 147 assert module.__spec__ is not None 148 assert module.__spec__.origin is not None
147 assert module.__spec__ is not None 148 assert module.__spec__.origin is not None 149 fn = Path(module.__spec__.origin)
185 state.trace(f"found cached rewritten pyc for {fn}")
186 exec(co, module.__dict__)
187
403 try: 404 co = marshal.load(fp) 405 except Exception as e:
563 nonlocal depth, lines, assert_lineno, seen_lines
564 assert assert_lineno is not None
565 ret[assert_lineno] = "".join(lines).rstrip().rstrip("\\")
757 continue 758 assert isinstance(node, ast.AST) 759 for name, field in ast.iter_fields(node):
857 """Handle expressions we don't have custom code for.""" 858 assert isinstance(node, ast.expr) 859 res = self.assign(node)
874 # TODO: This assert should not be needed. 875 assert self.module_path is not None 876 warnings.warn_explicit(
94 else: 95 assert line[0] in ["~", ">"] 96 stack[-1] += 1
98 result.append(" " * indent + line[1:])
99 assert len(stack) == 1
100 return result
462 else: 463 assert False 464
301 self.active = any(config.getoption(key) for key in active_keys)
302 assert config.cache
303 self.lastfailed: Dict[str, bool] = config.cache.get("cache/lastfailed", {})
401
402 assert config.cache is not None
403 saved_lastfailed = config.cache.get("cache/lastfailed", {})
413 self.active = config.option.newfirst
414 assert config.cache is not None
415 self.cached_nodeids = set(config.cache.get("cache/nodeids", []))
449
450 assert config.cache is not None
451 config.cache.set("cache/nodeids", sorted(self.cached_nodeids))
542 """ 543 assert request.config.cache is not None 544 return request.config.cache
549 if config.option.verbose > 0 or config.getini("cache_dir") != ".pytest_cache":
550 assert config.cache is not None
551 cachedir = config.cache._cachedir
565 566 assert config.cache is not None 567
181 def getvalue(self) -> str:
182 assert isinstance(self.buffer, io.BytesIO)
183 return self.buffer.getvalue().decode("UTF-8")
368 def _assert_state(self, op: str, states: Tuple[str, ...]) -> None:
369 assert (
370 self._state in states
371 ), "cannot {} in state {!r}: expected one of {}".format(
372 op, self._state, ", ".join(states)
373 )
374
424 self._assert_state("snap", ("started", "suspended"))
425 assert isinstance(self.tmpfile, CaptureIO)
426 res = self.tmpfile.getvalue()
488 def _assert_state(self, op: str, states: Tuple[str, ...]) -> None:
489 assert (
490 self._state in states
491 ), "cannot {} in state {!r}: expected one of {}".format(
492 op, self._state, ", ".join(states)
493 )
494
638 if out: 639 assert self.out is not None 640 self.out.writeorg(out)
641 if err: 642 assert self.err is not None 643 self.err.writeorg(err)
662 if self._in_suspended: 663 assert self.in_ is not None 664 self.in_.resume()
748 def start_global_capturing(self) -> None: 749 assert self._global_capturing is None 750 self._global_capturing = _get_multicapture(self._method)
778 def read_global_capture(self) -> CaptureResult[str]: 779 assert self._global_capturing is not None 780 return self._global_capturing.readouterr()
434 def assert_never(value: NoReturn) -> NoReturn:
435 assert False, f"Unhandled value: {value} ({type(value).__name__})"
354 # abuse typeguard from importlib to avoid massive method type union thats lacking a alias
355 assert inspect.isroutine(method)
356 known_marks: set[str] = {m.name for m in getattr(method, "pytestmark", [])}
432 ) 433 except Exception: 434 pass 435 self.trace.root.setwriter(err.write)
642 except Exception as e: 643 assert e.__traceback__ is not None 644 exc_info = (type(e), e, e.__traceback__)
653 if dirpath in path.parents or path == dirpath: 654 assert mod not in mods 655 mods.append(mod)
765 # _pytest prefix. 766 assert isinstance(modname, str), ( 767 "module name as text required, got %r" % modname 768 ) 769 if self.is_blocked(modname) or self.get_plugin(modname) is not None:
1048 def _do_configure(self) -> None: 1049 assert not self._configured 1050 self._configured = True
1067 ) 1068 assert terminalreporter is not None 1069 return terminalreporter._tw
1418 # Parse given cmdline arguments into this config object. 1419 assert ( 1420 self.args == [] 1421 ), "can only parse cmdline args at most once per Config object" 1422 self.hook.pytest_addhooks.call_historic(
1483 x = self.getini(name) 1484 assert isinstance(x, list) 1485 x.append(line) # modifies the cached list inline
1539 # TODO: This assert is probably not valid in all cases. 1540 assert self.inipath is not None 1541 dp = self.inipath.parent
1568 return None 1569 assert mod.__file__ is not None 1570 modpath = Path(mod.__file__).parent
1662 try: 1663 assert False 1664 except AssertionError:
207 """ 208 assert type in (None, "string", "paths", "pathlist", "args", "linelist", "bool") 209 self._inidict[name] = (help, type, default)
60 else: 61 assert fspath_value is not None 62 path_value = Path(fspath_value)
208 )
209 assert rootdir is not None
210 return rootdir, inipath, inicfg or {}
169 if cls._recursive_debug == 0: 170 assert cls._config is not None 171 tw = _pytest.config.create_terminal_writer(cls._config)
184 ) 185 assert capman is not None 186 capman.resume()
188 tw.sep(">", "PDB continue")
189 assert cls._pluginmanager is not None
190 cls._pluginmanager.hook.pytest_leave_pdb(config=cls._config, pdb=self)
295 sys.stdout.write(err) 296 assert call.excinfo is not None 297
381 else: 382 assert excinfo._excinfo is not None 383 return excinfo._excinfo[2]
290 def runtest(self) -> None: 291 assert self.dtest is not None 292 assert self.runner is not None
291 assert self.dtest is not None 292 assert self.runner is not None 293 _check_all_skipped(self.dtest)
347 if lineno is not None: 348 assert failure.test.docstring is not None 349 lines = failure.test.docstring.splitlines(False)
350 # add line numbers to the left of the error message 351 assert test.lineno is not None 352 lines = [
378 def reportinfo(self) -> Tuple[Union["os.PathLike[str]", str], Optional[int], str]: 379 assert self.dtest is not None 380 return self.path, self.dtest.lineno, "[doctest] %s" % self.name
172 for argname, argvalue in callspec.funcargs.items(): 173 assert argname not in callspec.params 174 callspec.params[argname] = argvalue
195 if node is None: 196 assert scope is Scope.Class and isinstance( 197 collector, _pytest.python.Module 198 ) 199 # Use module-level collector for class-scope (for now).
241 the specified scope.""" 242 assert scope is not Scope.Function 243 try:
452 node = self._pyfuncitem
453 assert node, 'Could not obtain a node for scope "{}" for function {!r}'.format(
454 scope, self._pyfuncitem
455 )
456 return node
463 # not known at parsing/collection time. 464 assert self._pyfuncitem.parent is not None 465 parentid = self._pyfuncitem.parent.nodeid
585 fixturedef = self._get_active_fixturedef(argname)
586 assert fixturedef.cached_result is not None, (
587 f'The fixture value for "{argname}" is not available. '
588 "This can happen when the fixture has already been torn down."
589 )
590 return fixturedef.cached_result[0]
1047 # PseudoFixtureDef is only for "request". 1048 assert isinstance(fixturedef, FixtureDef) 1049 fixturedef.addfinalizer(functools.partial(self.finish, request=request))
1065 self.finish(request) 1066 assert self.cached_result is None 1067
1114 fixdef = request._get_active_fixturedef(argname) 1115 assert fixdef.cached_result is not None 1116 result, arg_cache_key, exc = fixdef.cached_result
1125 exc_info = sys.exc_info() 1126 assert exc_info[0] is not None 1127 if isinstance(
1631 else: 1632 assert isinstance(node_or_obj, nodes.Node) 1633 holderobj = cast(object, node_or_obj.obj) # type: ignore[attr-defined]
1633 holderobj = cast(object, node_or_obj.obj) # type: ignore[attr-defined] 1634 assert isinstance(node_or_obj.nodeid, str) 1635 nodeid = node_or_obj.nodeid
165 ) 166 assert reporter is not None 167 tw = reporter._tw
12 import re 13 import xml.etree.ElementTree as ET 14 from datetime import datetime
200 else: 201 assert report.longrepr is not None 202 reprcrash: Optional[ReprFileLocation] = getattr(
213 # msg = str(report.longrepr.reprtraceback.extraline)
214 assert report.longrepr is not None
215 self._add_simple("error", "collection failure", str(report.longrepr))
220 def append_error(self, report: TestReport) -> None: 221 assert report.longrepr is not None 222 reprcrash: Optional[ReprFileLocation] = getattr(
244 else: 245 assert isinstance(report.longrepr, tuple) 246 filename, lineno, skipreason = report.longrepr
3 import shlex 4 import subprocess 5 from pathlib import Path
400 # TODO: This assert is probably not valid in all cases. 401 assert self.inipath is not None 402 dp = self.inipath.parent
115 116 assert self._fmt is not None 117 levelname_fmt_match = self.LEVELNAME_FMT_REGEX.search(self._fmt)
663 # Guaranteed by `_log_cli_enabled()`.
664 assert terminal_reporter is not None
665 capture_manager = config.pluginmanager.get_plugin("capturemanager")
582 ) -> Sequence[nodes.Collector]:
583 assert (
584 fspath.is_file()
585 ), "{!r} is not a file (isdir={!r}, exists={!r}, islink={!r})".format(
586 fspath, fspath.is_dir(), fspath.exists(), fspath.is_symlink()
587 )
588 ihook = self.gethookproxy(fspath)
725 if argpath.is_dir():
726 assert not names, f"invalid arg {(argpath, names)!r}"
727
756 else: 757 assert argpath.is_file() 758
840 else: 841 assert isinstance(node, nodes.Collector) 842 rep = collect_one_node(node)
226 """
227 ret: bool = eval(self.code, {"__builtins__": {}}, MatcherAdapter(matcher))
228 return ret
88 else: 89 assert isinstance(marks, collections.abc.Collection) 90
241 """ 242 assert self.name == other.name 243
415 """ 416 assert isinstance(mark, Mark), mark 417 # Always reassign name to avoid updating pytestmark in a reference that
121 else: 122 assert fspath is not None 123 return Path(fspath)
234 if nodeid is not None: 235 assert "::()" not in nodeid 236 self._nodeid = nodeid
302 path, lineno = get_fslocation_from_item(self) 303 assert lineno is not None 304 warnings.warn_explicit(
433 current = current.parent 434 assert current is None or isinstance(current, cls) 435 return current
597 if isinstance(path_or_parent, Node): 598 assert parent is None 599 parent = cast(FSCollector, path_or_parent)
600 elif isinstance(path_or_parent, Path): 601 assert path is None 602 path = path_or_parent
617 if session is None: 618 assert parent is not None 619 session = parent.session
781 relfspath = self.session._node_location_to_relpath(path) 782 assert type(location[2]) is str 783 return (relfspath, location[1], location[2])
82 response: str = (
83 urlopen(url, data=urlencode(params).encode("ascii")).read().decode("utf-8")
84 )
107 s = file.getvalue() 108 assert len(s) 109 pastebinurl = create_new_paste(s)
223 current_symlink.symlink_to(link_to) 224 except Exception: 225 pass 226
236 new_path.mkdir(mode=mode) 237 except Exception: 238 pass 239 else:
406 return p 407 assert e is not None 408 raise e
757 """ 758 assert isinstance(directory, Path) 759 assert isinstance(dest, Path)
758 assert isinstance(directory, Path) 759 assert isinstance(dest, Path) 760 if dest == directory:
782 """Recursively copy a source directory to target.""" 783 assert source.is_dir() 784 for entry in visit(source, recurse=lambda entry: not entry.is_symlink()):
13 import shutil 14 import subprocess 15 import sys
126 def get_open_files(self) -> List[Tuple[str, str]]:
127 out = subprocess.run(
128 ("lsof", "-Ffn0", "-p", str(os.getpid())),
129 stdout=subprocess.PIPE,
130 stderr=subprocess.DEVNULL,
131 check=True,
132 text=True,
133 encoding=locale.getpreferredencoding(False),
134 ).stdout
135
159 try:
160 subprocess.run(("lsof", "-v"), check=True)
161 except (OSError, subprocess.CalledProcessError):
291 print("NAMEMATCH", name, call)
292 if eval(check, backlocals, call.__dict__):
293 print("CHECKERMATCH", repr(check), "->", call)
314 values = self.getcalls(name) 315 assert len(values) == 1, (name, values) 316 return values[0]
435 if rep.when == "call": 436 assert isinstance(rep, TestReport) 437 passed.append(rep)
440 else:
441 assert rep.failed, f"Unexpected outcome: {rep!r}"
442 failed.append(rep)
796 ret = p 797 assert ret is not None 798 return ret
955 for extra_element in self._request.node.iter_markers("pytester_example_path"):
956 assert extra_element.args
957 example_dir = example_dir.joinpath(*extra_element.args)
1000 session = Session.from_config(config) 1001 assert "::" not in str(arg) 1002 p = Path(os.path.abspath(arg))
1194 1195 assert reprec.ret is not None 1196 res = RunResult(
1279 return item
1280 assert 0, "{!r} item not found in module:\n{}\nitems: {}".format(
1281 funcname, source, items
1282 )
1283
1317 path = self.path.joinpath(source) 1318 assert not withinit, "not supported for paths" 1319 else:
1372 1373 popen = subprocess.Popen(cmdargs, stdout=stdout, stderr=stderr, **kw) 1374 if stdin is self.CLOSE_STDIN:
1374 if stdin is self.CLOSE_STDIN: 1375 assert popen.stdin is not None 1376 popen.stdin.close()
1377 elif isinstance(stdin, bytes): 1378 assert popen.stdin is not None 1379 popen.stdin.write(stdin)
34 expected = {"passed": passed, "skipped": skipped, "failed": failed}
35 assert obtained == expected, outcomes
36
74 expected["deselected"] = deselected 75 assert obtained == expected
238 ) -> Union[None, nodes.Item, nodes.Collector, List[Union[nodes.Item, nodes.Collector]]]: 239 assert isinstance(collector, (Class, Module)), type(collector) 240 # Nothing was collected elsewhere, let's do it here.
326 # TODO: Improve the type of `parent` such that assert/ignore aren't needed. 327 assert self.parent is not None 328 obj = self.parent.obj # type: ignore[attr-defined]
354 file_path = sys.modules[obj.__module__].__file__
355 assert file_path is not None
356 if file_path.endswith(".pyc"):
362 modpath = self.getmodpath() 363 assert isinstance(lineno, int) 364 return path, lineno, modpath
474 modulecol = self.getparent(Module) 475 assert modulecol is not None 476 module = modulecol.obj
716 ) -> Sequence[nodes.Collector]:
717 assert (
718 fspath.is_file()
719 ), "{!r} is not a file (isdir={!r}, exists={!r}, islink={!r})".format(
720 fspath, fspath.is_dir(), fspath.exists(), fspath.is_symlink()
721 )
722 ihook = self.session.gethookproxy(fspath)
807 if hasinit(self.obj): 808 assert self.parent is not None 809 self.warn(
817 elif hasnew(self.obj): 818 assert self.parent is not None 819 self.warn(
1602 for _, fixturedefs in sorted(info.name2fixturedefs.items()): 1603 assert fixturedefs is not None 1604 if not fixturedefs:
1632 for argname, fixturedefs in fm._arg2fixturedefs.items(): 1633 assert fixturedefs is not None 1634 if not fixturedefs:
1776 def _getobj(self): 1777 assert self.parent is not None 1778 if isinstance(self.parent, Class):
987 fail(self.message) 988 assert self.excinfo is not None 989 if not issubclass(exc_type, self.expected_exception):
226 # record=True means it's None. 227 assert _list is not None 228 self._list = _list
326 # Remove "collect" from the Literal type -- only for collection calls. 327 assert when != "collect" 328 duration = call.duration
349 r = excinfo._getreprcrash() 350 assert ( 351 r is not None 352 ), "There should always be a traceback entry for skipping a test." 353 if excinfo.value._use_item_location:
354 path, line = item.reportinfo()[:2] 355 assert line is not None 356 longrepr = os.fspath(path), line + 1, r.message
462 return CollectReport._from_json(data)
463 assert False, "Unknown report_type unserialize data: {}".format(
464 data["$report_type"]
465 )
466 return None
501 def serialize_exception_longrepr(rep: BaseReport) -> Dict[str, Any]: 502 assert rep.longrepr is not None 503 # TODO: Investigate whether the duck typing is really necessary here.
173 sys.last_value = e 174 assert e.__traceback__ is not None 175 # Skip *this* frame
256 else:
257 assert False, f"Unhandled runtest hook case: {when}"
258 reraise: Tuple[Type[BaseException], ...] = (Exit,)
384 r_ = collector._repr_failure_py(call.excinfo, "line") 385 assert isinstance(r_, ExceptionChainRepr), repr(r_) 386 r = r_.reprcrash
386 r = r_.reprcrash 387 assert r 388 longrepr = (str(r.path), r.lineno, r.message)
392 if not hasattr(errorinfo, "toterminal"): 393 assert isinstance(errorinfo, str) 394 errorinfo = CollectErrorRepr(errorinfo)
484 for col, (finalizers, exc) in self.stack.items(): 485 assert col in needed_collectors, "previous item was not torn down properly" 486 if exc:
489 for col in needed_collectors[len(self.stack) :]: 490 assert col not in self.stack 491 # Push onto the stack.
503 """ 504 assert node and not isinstance(node, tuple) 505 assert callable(finalizer)
504 assert node and not isinstance(node, tuple) 505 assert callable(finalizer) 506 assert node in self.stack, (node, self.stack)
505 assert callable(finalizer) 506 assert node in self.stack, (node, self.stack) 507 self.stack[node][0].append(finalizer)
540 if nextitem is None: 541 assert not self.stack 542
115 condition_code = compile(condition, filename, "eval") 116 result = eval(condition_code, globals_) 117 except SyntaxError as exc:
270 elif call.excinfo and isinstance(call.excinfo.value, xfail.Exception): 271 assert call.excinfo.value.msg is not None 272 rep.wasxfail = "reason: " + call.excinfo.value.msg
49 if not session.config.getoption("stepwise"):
50 assert session.config.cache is not None
51 if hasattr(session.config, "workerinput"):
63 self.report_status = "" 64 assert config.cache is not None 65 self.cache: Cache = config.cache
107 self.lastfailed = report.nodeid 108 assert self.session is not None 109 self.session.shouldstop = (
638 def _is_last_item(self) -> bool: 639 assert self._session is not None 640 return len(self._progress_nodeids_reported) == self._session.testscollected
642 def pytest_runtest_logfinish(self, nodeid: str) -> None: 643 assert self._session 644 if self.verbosity <= 0 and self._show_progress_info:
663 def _get_progress_information_message(self) -> str: 664 assert self._session 665 collected = self._session.testscollected
898 excrepr = self._keyboardinterrupt_memo 899 assert excrepr is not None 900 assert excrepr.reprcrash is not None
899 assert excrepr is not None 900 assert excrepr.reprcrash is not None 901 msg = excrepr.reprcrash.message
1216 self._set_main_color() 1217 assert self._main_color 1218 assert self._known_types
1217 assert self._main_color 1218 assert self._known_types 1219 return self._main_color, self._known_types
1392 for event in skipped: 1393 assert event.longrepr is not None 1394 assert isinstance(event.longrepr, tuple), (event, event.longrepr)
1393 assert event.longrepr is not None 1394 assert isinstance(event.longrepr, tuple), (event, event.longrepr) 1395 assert len(event.longrepr) == 3, (event, event.longrepr)
1394 assert isinstance(event.longrepr, tuple), (event, event.longrepr) 1395 assert len(event.longrepr) == 3, (event, event.longrepr) 1396 fspath, lineno, reason = event.longrepr
1473 else: 1474 assert report.skipped 1475 assert isinstance(report.longrepr, tuple)
1474 assert report.skipped 1475 assert isinstance(report.longrepr, tuple) 1476 _, _, reason = report.longrepr
53 ) -> None: 54 assert self._old_hook is not None 55 threading.excepthook = self._old_hook
197 ) 198 assert basetemp is not None, basetemp 199 self._basetemp = basetemp
188 def _getobj(self): 189 assert self.parent is not None 190 # Unlike a regular Function in a Class, where `item.obj` returns
199 self._explicit_tearDown: Optional[Callable[[], None]] = None 200 assert self.parent is not None 201 self._testcase = self.parent.obj(self.name) # type: ignore[attr-defined]
306 307 assert self._testcase is not None 308
322 # We need to consider if the test itself is skipped, or the whole class. 323 assert isinstance(self.parent, UnitTestCase) 324 skipped = _is_skipped(self.obj) or _is_skipped(self.parent.obj)
55 ) -> None: 56 assert self._old_hook is not None 57 sys.unraisablehook = self._old_hook
44 # mypy can't infer that record=True means log is not None; help it. 45 assert log is not None 46
360 coro = task.get_coro() 361 assert coro is not None 362 try:
463 464 assert self._host_task is not None 465 host_task_state = _task_states.get(self._host_task)
511 if self._pending_uncancellations: 512 assert self._parent_scope is not None 513 assert self._parent_scope._pending_uncancellations is not None
512 assert self._parent_scope is not None 513 assert self._parent_scope._pending_uncancellations is not None 514 self._parent_scope._pending_uncancellations += (
804 task_state = _task_states[_task] 805 assert task_state.cancel_scope is not None 806 assert _task in task_state.cancel_scope._tasks
805 assert task_state.cancel_scope is not None 806 assert _task in task_state.cancel_scope._tasks 807 task_state.cancel_scope._tasks.remove(_task)
2125 coro = task.get_coro() 2126 assert coro is not None, "created TaskInfo from a completed Task" 2127 super().__init__(id(task), parent_id, task.get_name(), coro)
789 else: 790 assert total_tokens is not None 791 self.__original = trio.CapacityLimiter(total_tokens)
1143 stderr=stderr, 1144 shell=True, 1145 **kwargs, 1146 ) 1147 else: 1148 process = await trio.lowlevel.open_process( 1149 [convert_item(item) for item in command], 1150 stdin=stdin, 1151 stdout=stdout, 1152 stderr=stderr,
1358 root_task = current_root_task() 1359 assert root_task 1360 task_infos = [TrioTaskInfo(root_task)]
57 _selector = None 58 assert not self._selector.get_map(), ( 59 "selector still has registered file descriptors after shutdown" 60 ) 61
42 # Needed for mypy to assume self still has the __cm member 43 assert isinstance(self, ContextManagerMixin) 44 if self.__cm is not None:
81 # Needed for mypy to assume self still has the __cm member 82 assert isinstance(self, ContextManagerMixin) 83 if self.__cm is None:
128 # Needed for mypy to assume self still has the __cm member 129 assert isinstance(self, AsyncContextManagerMixin) 130 if self.__cm is not None:
173 ) -> _ExitT_co: 174 assert isinstance(self, AsyncContextManagerMixin) 175 if self.__cm is None:
489 else:
490 local_address = ("0.0.0.0", 0)
491
6 from os import PathLike 7 from subprocess import PIPE, CalledProcessError, CompletedProcess 8 from typing import IO, Any, Union, cast
473 with self._lock: 474 assert self._portal_cm 475 assert self._leases > 0
474 assert self._portal_cm 475 assert self._leases > 0 476 self._leases -= 1
61 if not _runner_leases: 62 assert _runner_stack is not None 63 _runner_stack.close()
246 async def send_eof(self) -> None: 247 tls_version = self.extra(TLSAttribute.tls_version) 248 match = re.match(r"TLSv(\d+)(?:\.(\d+))?", tls_version)
111 ) -> T_Retval: 112 import pickle 113
122 if fmt == FMT_PICKLED: 123 res = pickle.loads(res) 124
3 import os 4 import pickle 5 import subprocess
4 import pickle 5 import subprocess 6 import sys
85 86 retval = pickle.loads(pickled_response) 87 if status == b"EXCEPTION":
87 if status == b"EXCEPTION": 88 assert isinstance(retval, BaseException) 89 raise retval
209 try: 210 command, *args = pickle.load(stdin.buffer) 211 except EOFError:
135 # as that is the only way _proxied will be correctly set as a ClassDef. 136 assert isinstance(self, (nodes.Const, Generator, UnionType)) 137 else:
447 def __repr__(self) -> str: 448 assert self._proxied.parent, "Expected a parent node" 449 frame = self._proxied.parent.frame()
489 if self._proxied.name == "__new__": 490 assert self._proxied.parent, "Expected a parent node" 491 qname = self._proxied.parent.frame().qname()
516 const = nodes.const_factory(value) 517 assert not isinstance(const, nodes.EmptyNode) 518 yield const
472 cls = scoped_nodes.get_wrapping_class(scope) 473 assert cls is not None 474 if not node.args:
985 ) -> Iterator[CopyResult]: 986 assert isinstance(node.func, nodes.Attribute) 987 inferred_orig, inferred_copy = itertools.tee(node.func.expr.infer(context=context))
1016 call = arguments.CallSite.from_call(node, context=context) 1017 assert isinstance(node.func, (nodes.Attribute, nodes.AssignAttr, nodes.DelAttr)) 1018
52 res = safe_infer(cache_info) 53 assert res is not None 54 yield res
77 obj = getattr(parent, name) 78 except Exception: # pylint: disable=broad-except 79 # gi.module.IntrospectionModule.__getattr__() can raise all kinds of things 80 # like ValueError, TypeError, NotImplementedError, RepositoryError, etc 81 continue 82
191 warnings.simplefilter("ignore", PyGIWarning)
192 except Exception: # pylint:disable=broad-except
193 pass
194
241 gi.require_version(node.args[0].value, node.args[1].value) 242 except Exception: # pylint:disable=broad-except 243 pass 244
43 """ 44 assert isinstance(node.args.args, list) 45 del node.args.args[0]
203 ) 204 assert isinstance(func, nodes.NodeNG) 205 try:
109 node = self._manager.visit_transforms(node) 110 assert isinstance(node, nodes.Module) 111 return node
227 228 assert node.parent # It should always default to the module 229 module = node.root()
335 real_expr = node.args[0] 336 assert node.parent 337 real_expr.parent = node.parent
103 if myframe is frame and mystmt and mystmt.fromlineno is not None: 104 assert mystmt.fromlineno is not None, mystmt 105 mylineno = mystmt.fromlineno + offset
296 return module 297 except Exception: # pylint: disable=broad-except 298 continue 299 return None
355 ret = modastroid.getattr(klass.__name__)[0] 356 assert isinstance(ret, nodes.ClassDef) 357 return ret
292 continue 293 assert is_package_cb is not None 294 if is_package_cb(pathname, modpath[:-1]):
420 if parts[0] == "": 421 assert ( 422 context_file is not None 423 ), "explicit relative import, but no context_file?" 424 path = [] # prevent resolving the import non-relatively
428 starti += 1 429 assert ( 430 context_file is not None 431 ), "explicit relative import, but no context_file?" 432 context_file = os.path.dirname(context_file)
632 """ 633 assert modpath 634 location = None
645 try: 646 assert found_spec.location is not None 647 location = get_source_file(found_spec.location)
656 elif found_spec.type == spec.ModuleType.PKG_DIRECTORY: 657 assert found_spec.location is not None 658 location = _has_init(found_spec.location)
1903 1904 assert retval is not None 1905 return retval # it was all the same value
5668 value_type = type(value) 5669 assert not issubclass(value_type, NodeNG) 5670
316 if not (parent := self.parent): 317 assert isinstance(self, nodes.Module) 318 return self
321 parent = parent.parent 322 assert isinstance(parent, nodes.Module) 323 return parent
465 """ 466 assert self.parent 467 self.parent.set_local(name, stmt)
1580 if isinstance(caller.args, node_classes.Arguments): 1581 assert caller.args.args is not None 1582 metaclass = next(caller.args.args[0].infer(context), None)
305 if context:
306 assert (
307 context.callcontext
308 ), "CallContext should be set before inferring call result"
309 current_passed_keywords = {
499 child = self._done[member] 500 assert isinstance(child, nodes.ClassDef) 501 else:
616 proxy = astroid_builtin.getattr(cls.__name__)[0] 617 assert isinstance(proxy, nodes.ClassDef) 618 if cls in (dict, list, set, tuple):
127 if isinstance(parent, nodes.AsyncFunctionDef): 128 search_token = "async" 129 elif isinstance(parent, nodes.FunctionDef):
129 elif isinstance(parent, nodes.FunctionDef): 130 search_token = "def" 131 else:
131 else: 132 search_token = "class" 133
498 else: 499 assert node.parent 500 assert node.name
499 assert node.parent 500 assert node.name 501 node.parent.set_local(node.name, node)
592 # save argument names in locals: 593 assert newnode.parent 594 if vararg:
27 token_model = self.server.token_model 28 if token_type_hint == "access_token": 29 rv = _query_access_token(token_model, token)
29 rv = _query_access_token(token_model, token) 30 elif token_type_hint == "refresh_token": 31 rv = _query_refresh_token(token_model, token)
23 24 def __init__( 25 self, 26 token_endpoint, 27 issuer, 28 subject, 29 audience=None, 30 grant_type=None, 31 claims=None, 32 token_placement="header", 33 scope=None, 34 **kwargs, 35 ): 36 client_kwargs = extract_client_kwargs(kwargs) 37 httpx.AsyncClient.__init__(self, **client_kwargs) 38 39 _AssertionClient.__init__( 40 self, 41 session=None, 42 token_endpoint=token_endpoint, 43 issuer=issuer, 44 subject=subject, 45 audience=audience, 46 grant_type=grant_type, 47 claims=claims, 48 token_placement=token_placement, 49 scope=scope, 50 **kwargs, 51 ) 52
80
81 def __init__(
82 self,
83 token_endpoint,
84 issuer,
85 subject,
86 audience=None,
87 grant_type=None,
88 claims=None,
89 token_placement="header",
90 scope=None,
91 **kwargs,
92 ):
93 client_kwargs = extract_client_kwargs(kwargs)
94 # app keyword was dropped!
95 app_value = client_kwargs.pop("app", None)
96 if app_value is not None:
97 client_kwargs["transport"] = httpx.WSGITransport(app=app_value)
98
99 httpx.Client.__init__(self, **client_kwargs)
100
101 _AssertionClient.__init__(
102 self,
103 session=self,
104 token_endpoint=token_endpoint,
105 issuer=issuer,
106 subject=subject,
107 audience=audience,
108 grant_type=grant_type,
109 claims=claims,
110 token_placement=token_placement,
111 scope=scope,
112 **kwargs,
113 )
114
69 70 def __init__( 71 self, 72 client_id=None, 73 client_secret=None, 74 token_endpoint_auth_method=None, 75 revocation_endpoint_auth_method=None, 76 scope=None, 77 redirect_uri=None, 78 token=None, 79 token_placement="header", 80 update_token=None, 81 leeway=60, 82 **kwargs, 83 ): 84 # extract httpx.Client kwargs 85 client_kwargs = self._extract_session_request_params(kwargs) 86 httpx.AsyncClient.__init__(self, **client_kwargs) 87 88 # We use a Lock to synchronize coroutines to prevent 89 # multiple concurrent attempts to refresh the same token 90 self._token_refresh_lock = Lock() 91 92 _OAuth2Client.__init__( 93 self, 94 session=None, 95 client_id=client_id, 96 client_secret=client_secret, 97 token_endpoint_auth_method=token_endpoint_auth_method, 98 revocation_endpoint_auth_method=revocation_endpoint_auth_method, 99 scope=scope, 100 redirect_uri=redirect_uri, 101 token=token, 102 token_placement=token_placement, 103 update_token=update_token, 104 leeway=leeway, 105 **kwargs, 106 ) 107
217
218 def __init__(
219 self,
220 client_id=None,
221 client_secret=None,
222 token_endpoint_auth_method=None,
223 revocation_endpoint_auth_method=None,
224 scope=None,
225 redirect_uri=None,
226 token=None,
227 token_placement="header",
228 update_token=None,
229 **kwargs,
230 ):
231 # extract httpx.Client kwargs
232 client_kwargs = self._extract_session_request_params(kwargs)
233 # app keyword was dropped!
234 app_value = client_kwargs.pop("app", None)
235 if app_value is not None:
236 client_kwargs["transport"] = httpx.WSGITransport(app=app_value)
237
238 httpx.Client.__init__(self, **client_kwargs)
239
240 _OAuth2Client.__init__(
241 self,
242 session=self,
243 client_id=client_id,
244 client_secret=client_secret,
245 token_endpoint_auth_method=token_endpoint_auth_method,
246 revocation_endpoint_auth_method=revocation_endpoint_auth_method,
247 scope=scope,
248 redirect_uri=redirect_uri,
249 token=token,
250 token_placement=token_placement,
251 update_token=update_token,
252 **kwargs,
253 )
254
31 32 def __init__( 33 self, 34 token_endpoint, 35 issuer, 36 subject, 37 audience=None, 38 grant_type=None, 39 claims=None, 40 token_placement="header", 41 scope=None, 42 default_timeout=None, 43 leeway=60, 44 **kwargs, 45 ): 46 Session.__init__(self) 47 self.default_timeout = default_timeout 48 update_session_configure(self, kwargs) 49 AssertionClient.__init__( 50 self, 51 session=self, 52 token_endpoint=token_endpoint, 53 issuer=issuer, 54 subject=subject, 55 audience=audience, 56 grant_type=grant_type, 57 claims=claims, 58 token_placement=token_placement, 59 scope=scope, 60 leeway=leeway, 61 **kwargs, 62 ) 63
90 91 def __init__( 92 self, 93 client_id=None, 94 client_secret=None, 95 token_endpoint_auth_method=None, 96 revocation_endpoint_auth_method=None, 97 scope=None, 98 state=None, 99 redirect_uri=None, 100 token=None, 101 token_placement="header", 102 update_token=None, 103 leeway=60, 104 default_timeout=None, 105 **kwargs, 106 ): 107 Session.__init__(self) 108 self.default_timeout = default_timeout 109 update_session_configure(self, kwargs) 110 111 OAuth2Client.__init__( 112 self, 113 session=self, 114 client_id=client_id, 115 client_secret=client_secret, 116 token_endpoint_auth_method=token_endpoint_auth_method, 117 revocation_endpoint_auth_method=revocation_endpoint_auth_method, 118 scope=scope, 119 state=state, 120 redirect_uri=redirect_uri, 121 token=token, 122 token_placement=token_placement, 123 update_token=update_token, 124 leeway=leeway, 125 **kwargs, 126 ) 127
49 q = session.query(token_model) 50 if token_type_hint == "access_token": 51 return q.filter_by(access_token=token).first()
51 return q.filter_by(access_token=token).first() 52 elif token_type_hint == "refresh_token": 53 return q.filter_by(refresh_token=token).first()
328 "RSAES OAEP using default parameters", 329 padding.OAEP(padding.MGF1(hashes.SHA1()), hashes.SHA1(), None), 330 ),
328 "RSAES OAEP using default parameters", 329 padding.OAEP(padding.MGF1(hashes.SHA1()), hashes.SHA1(), None), 330 ),
143 144 rv = random.choice(key.keys) 145 # use side effect to add kid value into header
156 if not kid: 157 rv = random.choice(keys) 158 header["kid"] = rv["kid"]
149 if body and headers.get("Content-Type") != CONTENT_TYPE_FORM_URLENCODED:
150 oauth_body_hash = base64.b64encode(hashlib.sha1(body).digest())
151 oauth_params.append(("oauth_body_hash", oauth_body_hash.decode("utf-8")))
14 ) 15 return key.sign(msg, padding.PKCS1v15(), hashes.SHA1()) 16
20 try: 21 key.verify(sig, msg, padding.PKCS1v15(), hashes.SHA1()) 22 return True
88
89 DEFAULT_TOKEN_TYPE = "bearer"
90 SIGN_METHODS = {"bearer": add_bearer_token}
91 92 def __init__(self, token, token_placement="header", client=None): 93 self.token = OAuth2Token.from_dict(token) 94 self.token_placement = token_placement 95 self.client = client 96 self.hooks = set() 97
52
53 def __init__(
54 self,
55 session,
56 client_id=None,
57 client_secret=None,
58 token_endpoint_auth_method=None,
59 revocation_endpoint_auth_method=None,
60 scope=None,
61 state=None,
62 redirect_uri=None,
63 code_challenge_method=None,
64 token=None,
65 token_placement="header",
66 update_token=None,
67 leeway=60,
68 **metadata,
69 ):
70 self.session = session
71 self.client_id = client_id
72 self.client_secret = client_secret
73 self.state = state
74
75 if token_endpoint_auth_method is None:
76 if client_secret:
77 token_endpoint_auth_method = "client_secret_basic"
78 else:
79 token_endpoint_auth_method = "none"
80
81 self.token_endpoint_auth_method = token_endpoint_auth_method
82
83 if revocation_endpoint_auth_method is None:
84 if client_secret:
85 revocation_endpoint_auth_method = "client_secret_basic"
86 else:
87 revocation_endpoint_auth_method = "none"
88
89 self.revocation_endpoint_auth_method = revocation_endpoint_auth_method
90
91 self.scope = scope
92 self.redirect_uri = redirect_uri
93 self.code_challenge_method = code_challenge_method
94
95 self.token_auth = self.token_auth_class(token, token_placement, self)
96 self.update_token = update_token
97
98 token_updater = metadata.pop("token_updater", None)
99 if token_updater:
100 raise ValueError(
101 "update token has been redesigned, checkout the documentation"
102 )
103
104 self.metadata = metadata
105
106 self.compliance_hook = {
107 "access_token_response": set(),
108 "refresh_token_request": set(),
109 "refresh_token_response": set(),
110 "revoke_token_request": set(),
111 "introspect_token_request": set(),
112 }
113 self._auth_methods = {}
114
115 self.leeway = leeway
116
76 if client_secret: 77 token_endpoint_auth_method = "client_secret_basic" 78 else:
78 else: 79 token_endpoint_auth_method = "none" 80
18 19 TOKEN_TYPE = "bearer" 20
12 class BearerTokenValidator(TokenValidator): 13 TOKEN_TYPE = "bearer" 14
16 17 def __init__( 18 self, 19 session, 20 token_endpoint, 21 issuer, 22 subject, 23 audience=None, 24 grant_type=None, 25 claims=None, 26 token_placement="header", 27 scope=None, 28 leeway=60, 29 **kwargs, 30 ): 31 self.session = session 32 33 if audience is None: 34 audience = token_endpoint 35 36 self.token_endpoint = token_endpoint 37 38 if grant_type is None: 39 grant_type = self.DEFAULT_GRANT_TYPE 40 41 self.grant_type = grant_type 42 43 # https://tools.ietf.org/html/rfc7521#section-5.1 44 self.issuer = issuer 45 self.subject = subject 46 self.audience = audience 47 self.claims = claims 48 self.scope = scope 49 if self.token_auth_class is not None: 50 self.token_auth = self.token_auth_class(None, token_placement, self) 51 self._kwargs = kwargs 52 self.leeway = leeway 53
31 class JWTBearerTokenValidator(BearerTokenValidator): 32 TOKEN_TYPE = "bearer" 33 token_cls = JWTBearerToken
66 if "token_endpoint_auth_method" not in self:
67 self["token_endpoint_auth_method"] = "client_secret_basic"
68 self._validate_claim_value("token_endpoint_auth_method")
6 class IntrospectTokenValidator(TokenValidator): 7 TOKEN_TYPE = "bearer" 8
17 import shutil 18 import subprocess 19 import sys
104 try: 105 output = subprocess.check_output(bandit_command) 106 except subprocess.CalledProcessError as e:
72 prefix = deepgetattr(node, "value.id") 73 except Exception: 74 # NOTE(tkelsey): degrade gracefully when we can't get the fully 75 # qualified name for an attr, just return its base name. 76 pass 77
37 import sys 38 from xml.etree import cElementTree as ET 39
45 def hardcoded_bind_all_interfaces(context): 46 if context.string_val == "0.0.0.0": 47 return bandit.Issue(
61 if name == "hardcoded_tmp_directory":
62 return {"tmp_dirs": ["/tmp", "/var/tmp", "/dev/shm"]}
63
61 if name == "hardcoded_tmp_directory":
62 return {"tmp_dirs": ["/tmp", "/var/tmp", "/dev/shm"]}
63
61 if name == "hardcoded_tmp_directory":
62 return {"tmp_dirs": ["/tmp", "/var/tmp", "/dev/shm"]}
63
71 else: 72 tmp_dirs = ["/tmp", "/var/tmp", "/dev/shm"] 73
71 else: 72 tmp_dirs = ["/tmp", "/var/tmp", "/dev/shm"] 73
71 else: 72 tmp_dirs = ["/tmp", "/var/tmp", "/dev/shm"] 73
551 552 assert sys.version_info >= (3, 9), "Black requires Python 3.9+" 553 if sys.version_info[:3] == (3, 12, 5):
674 else: 675 assert root is not None # root is only None if code is not None 676 try:
749
750 assert root.is_absolute(), f"INTERNAL ERROR: `root` must be absolute but is {root}"
751 using_default_exclude = exclude is None
1536 orig_name = child.children[0] 1537 assert isinstance(orig_name, Leaf), "Invalid syntax parsing imports" 1538 assert orig_name.type == token.NAME, "Invalid syntax parsing imports"
1537 assert isinstance(orig_name, Leaf), "Invalid syntax parsing imports" 1538 assert orig_name.type == token.NAME, "Invalid syntax parsing imports" 1539 yield orig_name.value
4 import os 5 import pickle 6 import sys
79 try:
80 data: dict[str, tuple[float, int, str]] = pickle.load(fobj)
81 file_data = {k: FileData(*v) for k, v in data.items()}
101 match = re.match(r"^(\s*)(\S.*|)$", full_line) 102 assert match 103 whitespace, line = match.groups()
298 parent = leaf.parent 299 assert parent is not None, "INTERNAL ERROR: fmt: on/off handling (prefix only)" 300
306 307 assert leaf_idx is not None, "INTERNAL ERROR: fmt: on/off handling (leaf index)" 308
422 423 assert parent is not None, "INTERNAL ERROR: fmt: on/off handling (1)" 424 assert first_idx is not None, "INTERNAL ERROR: fmt: on/off handling (2)"
423 assert parent is not None, "INTERNAL ERROR: fmt: on/off handling (1)" 424 assert first_idx is not None, "INTERNAL ERROR: fmt: on/off handling (2)" 425
341
342 assert root.is_absolute(), f"INTERNAL ERROR: `root` must be absolute but is {root}"
343 for child in paths:
343 for child in paths: 344 assert child.is_absolute() 345 root_relative_path = child.relative_to(root).as_posix()
195 """Create a randomly generated token that is n_chars characters long.""" 196 assert n_chars > 0 197 n_bytes = max(n_chars // 2 - 1, 1)
213 """ 214 assert magic 215 n_chars = len(magic)
352 for arg in args: 353 assert isinstance(arg, ast.Constant) and isinstance(arg.value, str) 354 str_args.append(arg.value)
442 """ 443 assert len(node.children) == 3 444 if maybe_make_parens_invisible_in_atom(
1573 if is_lpar_token(child): 1574 assert is_rpar_token(parent.children[-1]) 1575 # make parentheses invisible
363 ): 364 assert closing.prev_sibling is not None 365 assert closing.prev_sibling.type == syms.subscriptlist
364 assert closing.prev_sibling is not None 365 assert closing.prev_sibling.type == syms.subscriptlist 366 return False
653 if previous_def is not None: 654 assert self.previous_line is not None 655 if self.mode.is_pyi:
714 ) -> tuple[int, int]: 715 assert self.previous_line is not None 716
1011 1012 assert len(line.leaves) >= 2, "Stranded delimiter" 1013
30 def pretty(self) -> str:
31 assert self.name[:2] == "PY"
32 return f"Python {self.name[2]}.{self.name[3:]}"
203
204 assert p is not None, f"INTERNAL ERROR: hand-made leaf without parent: {leaf!r}"
205 if t == token.COLON and p.type not in {
362 if not prev: 363 assert p.parent is not None, "subscripts are always parented" 364 if p.parent.type == syms.subscriptlist:
392 prevp_parent = prevp.parent
393 assert prevp_parent is not None
394 if prevp.type == token.COLON and prevp_parent.type in {
816 def is_parent_function_or_class(node: Node) -> bool:
817 assert node.type in {syms.suite, syms.simple_stmt}
818 assert node.parent is not None
817 assert node.type in {syms.suite, syms.simple_stmt}
818 assert node.parent is not None
819 # Note this works for suites / simple_stmts in async def as well
1051 """Returns whether this leaf is part of a type annotation.""" 1052 assert leaf.parent is not None 1053 return get_annotation_type(leaf) is not None
95 # Choose the latest version when raising the actual parsing error. 96 assert len(errors) >= 1 97 exc = errors[max(errors)]
218 return
219 assert (
220 newline_leaf.type == NEWLINE
221 ), f"Unexpectedly found leaf.type={newline_leaf.type}"
222 # We need to find the furthest ancestor with the NEWLINE as the last
341 # hits that means there's a problem in the parser. 342 assert isinstance(node, Node) 343 # 1 will always be the correct index since before this function is
7 """Get the stored complete schema for black's settings.""" 8 assert tool_name == "black", "Only black is supported." 9
67 # https://www.python.org/dev/peps/pep-0257/#handling-docstring-indentation 68 assert docstring, "INTERNAL ERROR: Multiline docstrings cannot be empty" 69 lines = lines_with_leading_tabs_expanded(docstring)
130
131 assert (
132 0 <= quote_idx < len(string) - 1
133 ), f"{string!r} is missing a starting quote character (' or \")."
134 assert string[-1] in (
133 ), f"{string!r} is missing a starting quote character (' or \")."
134 assert string[-1] in (
135 "'",
136 '"',
137 ), f"{string!r} is missing an ending quote character (' or \")."
138 assert set(string[:quote_idx]).issubset(
137 ), f"{string!r} is missing an ending quote character (' or \")."
138 assert set(string[:quote_idx]).issubset(
139 set(STRING_PREFIX_CHARS)
140 ), f"{set(string[:quote_idx])} is NOT a subset of {set(STRING_PREFIX_CHARS)}."
141
145 match = STRING_PREFIX_RE.match(s)
146 assert match is not None, f"failed to match string {s!r}"
147 orig_prefix = match.group(1)
187 first_quote_pos = s.find(orig_quote)
188 assert first_quote_pos != -1, f"INTERNAL ERROR: Malformed string {s!r}"
189
330 else:
331 assert groups["N"], f"Unexpected match: {m}"
332 # \N{}
723 mark_idx = temp_string.find(BREAK_MARK) 724 assert ( 725 mark_idx >= 0 726 ), "Logic error while filling the custom string breakpoint cache." 727
1099 string_indices = match_result.ok()
1100 assert len(string_indices) == 1, (
1101 f"{self.__class__.__name__} should only find one match at a time, found"
1102 f" {len(string_indices)}"
1103 )
1104 string_idx = string_indices[0]
1297 parent = LL[0].parent 1298 assert parent is not None # For type checkers. 1299 prev_sibling = parent.prev_sibling
1481 LL = line.leaves
1482 assert len(string_indices) == 1, (
1483 f"{self.__class__.__name__} should only find one match at a time, found"
1484 f" {len(string_indices)}"
1485 )
1486 string_idx = string_indices[0]
1788 1789 assert is_valid_index(max_break_idx) 1790 assert_is_leaf_string(string)
2163 LL = line.leaves
2164 assert len(string_indices) == 1, (
2165 f"{self.__class__.__name__} should only find one match at a time, found"
2166 f" {len(string_indices)}"
2167 )
2168 string_idx = string_indices[0]
2234 if old_parens_exist:
2235 assert right_leaves and right_leaves[-1].type == token.RPAR, (
2236 "Apparently, old parentheses do NOT exist?!"
2237 f" (left_leaves={left_leaves}, right_leaves={right_leaves})"
2238 )
2239 old_rpar_leaf = right_leaves.pop()
2366 """ 2367 assert leaves[string_idx].type == token.STRING 2368
2477 2478 assert string_parent is not None 2479 assert string_child_idx is not None
2478 assert string_parent is not None 2479 assert string_child_idx is not None 2480
78 number = int(number) 79 assert symbol not in self.symbol2number 80 assert number not in self.number2symbol
79 assert symbol not in self.symbol2number 80 assert number not in self.number2symbol 81 self.symbol2number[symbol] = number
122 lineno, line = lineno + 1, next(f) 123 assert line == '#include "pgenheaders.h"\n', (lineno, line) 124 lineno, line = lineno + 1, next(f)
124 lineno, line = lineno + 1, next(f) 125 assert line == '#include "grammar.h"\n', (lineno, line) 126
133 mo = re.match(r"static arc arcs_(\d+)_(\d+)\[(\d+)\] = {$", line)
134 assert mo, (lineno, line)
135 n, m, k = list(map(int, mo.groups()))
139 mo = re.match(r"\s+{(\d+), (\d+)},$", line)
140 assert mo, (lineno, line)
141 i, j = list(map(int, mo.groups()))
143 lineno, line = lineno + 1, next(f) 144 assert line == "};\n", (lineno, line) 145 allarcs[(n, m)] = arcs
147 mo = re.match(r"static state states_(\d+)\[(\d+)\] = {$", line)
148 assert mo, (lineno, line)
149 s, t = list(map(int, mo.groups()))
149 s, t = list(map(int, mo.groups())) 150 assert s == len(states), (lineno, line) 151 state = []
154 mo = re.match(r"\s+{(\d+), arcs_(\d+)_(\d+)},$", line)
155 assert mo, (lineno, line)
156 k, n, m = list(map(int, mo.groups()))
157 arcs = allarcs[n, m] 158 assert k == len(arcs), (lineno, line) 159 state.append(arcs)
161 lineno, line = lineno + 1, next(f) 162 assert line == "};\n", (lineno, line) 163 lineno, line = lineno + 1, next(f)
168 mo = re.match(r"static dfa dfas\[(\d+)\] = {$", line)
169 assert mo, (lineno, line)
170 ndfas = int(mo.group(1))
173 mo = re.match(r'\s+{(\d+), "(\w+)", (\d+), (\d+), states_(\d+),$', line)
174 assert mo, (lineno, line)
175 symbol = mo.group(2)
176 number, x, y, z = list(map(int, mo.group(1, 3, 4, 5))) 177 assert self.symbol2number[symbol] == number, (lineno, line) 178 assert self.number2symbol[number] == symbol, (lineno, line)
177 assert self.symbol2number[symbol] == number, (lineno, line) 178 assert self.number2symbol[number] == symbol, (lineno, line) 179 assert x == 0, (lineno, line)
178 assert self.number2symbol[number] == symbol, (lineno, line) 179 assert x == 0, (lineno, line) 180 state = states[z]
180 state = states[z] 181 assert y == len(state), (lineno, line) 182 lineno, line = lineno + 1, next(f)
183 mo = re.match(r'\s+("(?:\\\d\d\d)*")},$', line)
184 assert mo, (lineno, line)
185 first = {}
185 first = {}
186 rawbitset = eval(mo.group(1))
187 for i, c in enumerate(rawbitset):
193 lineno, line = lineno + 1, next(f) 194 assert line == "};\n", (lineno, line) 195 self.dfas = dfas
200 mo = re.match(r"static label labels\[(\d+)\] = {$", line)
201 assert mo, (lineno, line)
202 nlabels = int(mo.group(1))
205 mo = re.match(r'\s+{(\d+), (0|"\w+")},$', line)
206 assert mo, (lineno, line)
207 x, y = mo.groups()
211 else: 212 y = eval(y) 213 labels.append((x, y))
214 lineno, line = lineno + 1, next(f) 215 assert line == "};\n", (lineno, line) 216 self.labels = labels
219 lineno, line = lineno + 1, next(f)
220 assert line == "grammar _PyParser_Grammar = {\n", (lineno, line)
221 lineno, line = lineno + 1, next(f)
222 mo = re.match(r"\s+(\d+),$", line) 223 assert mo, (lineno, line) 224 ndfas = int(mo.group(1))
224 ndfas = int(mo.group(1)) 225 assert ndfas == len(self.dfas) 226 lineno, line = lineno + 1, next(f)
226 lineno, line = lineno + 1, next(f) 227 assert line == "\tdfas,\n", (lineno, line) 228 lineno, line = lineno + 1, next(f)
229 mo = re.match(r"\s+{(\d+), labels},$", line)
230 assert mo, (lineno, line)
231 nlabels = int(mo.group(1))
231 nlabels = int(mo.group(1)) 232 assert nlabels == len(self.labels), (lineno, line) 233 lineno, line = lineno + 1, next(f)
234 mo = re.match(r"\s+(\d+)$", line) 235 assert mo, (lineno, line) 236 start = int(mo.group(1))
236 start = int(mo.group(1)) 237 assert start in self.number2symbol, (lineno, line) 238 self.start = start
239 lineno, line = lineno + 1, next(f) 240 assert line == "};\n", (lineno, line) 241 try:
245 else: 246 assert 0, (lineno, line) 247
85 for release_range in self._release_ranges: 86 assert release_range.end is not None 87
131 if start != (lineno, column): 132 assert (lineno, column) <= start, ((lineno, column), start) 133 s_lineno, s_column = start
150 if debug: 151 assert type is not None 152 self.logger.debug(
181 # We never broke out -- EOF is too soon (how can this happen???)
182 assert start is not None
183 raise parse.ParseError("incomplete input", type, value, (prefix, start))
183 raise parse.ParseError("incomplete input", type, value, (prefix, start))
184 assert p.rootnode is not None
185 return p.rootnode
292 data = pkgutil.get_data(package, pickled_name) 293 assert data is not None 294 g = grammar.Grammar()
16 import os 17 import pickle 18 import tempfile
123 with open(filename, "rb") as f: 124 d = pickle.load(f) 125 self._update(d)
128 """Load the grammar tables from a pickle bytes object.""" 129 self._update(pickle.loads(pkl)) 130
23 all, tail = m.group(0, 1)
24 assert all.startswith("\\")
25 esc = simple_escapes.get(tail)
44 def evalString(s: str) -> str:
45 assert s.startswith("'") or s.startswith('"'), repr(s[:1])
46 q = s[0]
48 q = q * 3 49 assert s.endswith(q), repr(s[-len(q) :]) 50 assert len(s) >= 2 * len(q)
49 assert s.endswith(q), repr(s[-len(q) :]) 50 assert len(s) >= 2 * len(q) 51 s = s[len(q) : -len(q)]
33 def lam_sub(grammar: Grammar, node: RawNode) -> NL: 34 assert node[3] is not None 35 return Node(type=node[0], children=node[3], context=node[2])
229 ilabels = self.classify(type, value, context) 230 assert len(ilabels) >= 1 231
269 ilabel = cast(int, recorder.determine_route(next_token_value, force=force)) 270 assert ilabel is not None 271
331 elif value in self.grammar.soft_keywords: 332 assert type in self.grammar.tokens 333 # Current soft keywords (match, case, type) can only appear at the
365 newnode = convert(self.grammar, rawnode) 366 assert node[-1] is not None 367 node[-1].append(newnode)
390 dfa, state, node = self.stack[-1] 391 assert node[-1] is not None 392 node[-1].append(newnode)
64 rawfirst = self.first[name]
65 assert rawfirst is not None
66 first = {}
88 itoken = getattr(token, label, None) 89 assert isinstance(itoken, int), label 90 assert itoken in token.tok_name, label
89 assert isinstance(itoken, int), label 90 assert itoken in token.tok_name, label 91 if itoken in c.tokens:
98 # Either a keyword or an operator
99 assert label[0] in ('"', "'"), label
100 value = eval(label)
99 assert label[0] in ('"', "'"), label
100 value = eval(label)
101 if value[0].isalpha():
146 fset = self.first[label] 147 assert fset is not None 148 totalset.update(fset)
185 startsymbol = name 186 assert startsymbol is not None 187 return dfas, startsymbol
193 # values. 194 assert isinstance(start, NFAState) 195 assert isinstance(finish, NFAState)
194 assert isinstance(start, NFAState) 195 assert isinstance(finish, NFAState) 196
202 def addclosure(state: NFAState, base: dict[NFAState, int]) -> None: 203 assert isinstance(state, NFAState) 204 if state in base:
362 def addarc(self, next: "NFAState", label: Optional[str] = None) -> None: 363 assert label is None or isinstance(label, str) 364 assert isinstance(next, NFAState)
363 assert label is None or isinstance(label, str) 364 assert isinstance(next, NFAState) 365 self.arcs.append((label, next))
373 def __init__(self, nfaset: dict[NFAState, Any], final: NFAState) -> None: 374 assert isinstance(nfaset, dict) 375 assert isinstance(next(iter(nfaset)), NFAState)
374 assert isinstance(nfaset, dict) 375 assert isinstance(next(iter(nfaset)), NFAState) 376 assert isinstance(final, NFAState)
375 assert isinstance(next(iter(nfaset)), NFAState) 376 assert isinstance(final, NFAState) 377 self.nfaset = nfaset
381 def addarc(self, next: "DFAState", label: str) -> None: 382 assert isinstance(label, str) 383 assert label not in self.arcs
382 assert isinstance(label, str) 383 assert label not in self.arcs 384 assert isinstance(next, DFAState)
383 assert label not in self.arcs 384 assert isinstance(next, DFAState) 385 self.arcs[label] = next
393 # Equality test -- ignore the nfaset instance variable 394 assert isinstance(other, DFAState) 395 if self.isfinal != other.isfinal:
160 161 if token.type == TokenType.newline and token_str == "": 162 # Black doesn't yield empty newline tokens at the end of a file
171 yield ( 172 ASYNC if token_str == "async" else AWAIT, 173 token_str,
177 ) 178 elif token.type == TokenType.op and token_str == "...": 179 # Black doesn't have an ellipsis token yet, yield 3 DOTs instead
179 # Black doesn't have an ellipsis token yet, yield 3 DOTs instead 180 assert token.start_line == token.end_line 181 assert token.end_col == token.start_col + 3
180 assert token.start_line == token.end_line 181 assert token.end_col == token.start_col + 3 182
182 183 token_str = "." 184 for start_col in range(token.start_col, token.start_col + 3):
183 python_grammar = driver.load_packaged_grammar("blib2to3", _GRAMMAR_FILE, cache_dir)
184 assert "print" not in python_grammar.keywords
185 assert "exec" not in python_grammar.keywords
184 assert "print" not in python_grammar.keywords 185 assert "exec" not in python_grammar.keywords 186
72 """Constructor that prevents Base from being instantiated.""" 73 assert cls is not Base, "Cannot instantiate Base" 74 return object.__new__(cls)
129 """Replace this node with a new one in the parent.""" 130 assert self.parent is not None, str(self) 131 assert new is not None
130 assert self.parent is not None, str(self) 131 assert new is not None 132 if not isinstance(new, list):
137 if ch is self: 138 assert not found, (self.parent.children, self, new) 139 if new is not None:
143 l_children.append(ch) 144 assert found, (self.children, self, new) 145 self.parent.children = l_children
193 self.parent.update_sibling_maps() 194 assert self.parent.next_sibling_map is not None 195 return self.parent.next_sibling_map[id(self)]
207 self.parent.update_sibling_maps() 208 assert self.parent.prev_sibling_map is not None 209 return self.parent.prev_sibling_map[id(self)]
253 """ 254 assert type >= 256, type 255 self.type = type
257 for ch in self.children: 258 assert ch.parent is None, repr(ch) 259 ch.parent = self
269 """Return a canonical string representation."""
270 assert self.type is not None
271 return f"{self.__class__.__name__}({type_repr(self.type)}, {self.children!r})"
285 def clone(self) -> "Node": 286 assert self.type is not None 287 """Return a cloned (deep) copy of self."""
402 403 assert 0 <= type < 256, type 404 if context is not None:
418 419 assert self.type is not None 420 return (
437 def clone(self) -> "Leaf": 438 assert self.type is not None 439 """Return a cloned (deep) copy of self."""
482 # creating a new node. 483 assert children is not None 484 if len(children) == 1:
516 """Constructor that prevents BasePattern from being instantiated.""" 517 assert cls is not BasePattern, "Cannot instantiate BasePattern" 518 return object.__new__(cls)
520 def __repr__(self) -> str: 521 assert self.type is not None 522 args = [type_repr(self.type), self.content, self.name]
557 if r: 558 assert results is not None 559 results.update(r)
603 if type is not None: 604 assert 0 <= type < 256, type 605 if content is not None:
605 if content is not None: 606 assert isinstance(content, str), repr(content) 607 self.type = type
657 if type is not None: 658 assert type >= 256, type 659 if content is not None:
659 if content is not None: 660 assert not isinstance(content, str), repr(content) 661 newcontent = list(content)
662 for i, item in enumerate(newcontent): 663 assert isinstance(item, BasePattern), (i, item) 664 # I don't even think this code is used anywhere, but it does cause
745 """ 746 assert 0 <= min <= max <= HUGE, (min, max) 747 if content is not None:
750 # Check sanity of alternatives 751 assert len(wrapped_content), repr( 752 wrapped_content 753 ) # Can't have zero alternatives 754 for alt in wrapped_content:
754 for alt in wrapped_content: 755 assert len(alt), repr(alt) # Can have empty alternatives 756 self.content = wrapped_content
890 break 891 assert self.name is not None 892 r[self.name] = nodes[:count]
896 """Helper to recursively yield the matches.""" 897 assert self.content is not None 898 if count >= self.min:
920 if content is not None: 921 assert isinstance(content, BasePattern), repr(content) 922 self.content = content
79 80 def __init__(self, token_type=None, token_string="", position=-1, error_code=0): 81 self.token_type = token_type 82 self.token_string = token_string 83 self.position = position 84 self.error_code = error_code 85
590 # Ensure that the operation is not NOT 591 assert operation in ( 592 self.AND, 593 self.OR, 594 ) 595 # Move NOT inwards.
1055
1056 assert all(
1057 isinstance(arg, Expression) for arg in args
1058 ), f"Bad arguments: all arguments must be an Expression: {args!r}"
1059 self.args = tuple(args)
39 expr = algebra.parse(expr_str) 40 assert str(expr) == expr_str 41
73 74 assert expr.pretty() == expected.pretty() 75 assert expr == expected
74 assert expr.pretty() == expected.pretty() 75 assert expr == expected 76
90 ) 91 assert expr == expected 92
137 ) 138 assert expr == expected 139
271 ) 272 assert expr == expected 273
279 expected = algebra.AND(algebra.Symbol("l-a"), algebra.Symbol("b+c"))
280 assert expr == expected
281
289 except ParseError as pe: 290 assert pe.error_code == PARSE_UNKNOWN_TOKEN 291
298 except ParseError as pe: 299 assert pe.error_code == PARSE_UNKNOWN_TOKEN 300
307 except ParseError as pe: 308 assert pe.error_code == PARSE_UNKNOWN_TOKEN 309
316 except ParseError as pe: 317 assert pe.error_code == PARSE_UNKNOWN_TOKEN 318
325 except ParseError as pe: 326 assert pe.error_code == PARSE_UNKNOWN_TOKEN 327
334 except ParseError as pe: 335 assert pe.error_code == PARSE_UNKNOWN_TOKEN 336
343 except ParseError as pe: 344 assert pe.error_code == PARSE_UNKNOWN_TOKEN 345
352 except ParseError as pe: 353 assert pe.error_code == PARSE_INVALID_OPERATOR_SEQUENCE 354
361 except ParseError as pe: 362 assert pe.error_code == PARSE_INVALID_OPERATOR_SEQUENCE 363
369 except ParseError as pe: 370 assert pe.error_code == PARSE_INVALID_SYMBOL_SEQUENCE 371
377 except ParseError as pe: 378 assert pe.error_code == PARSE_INVALID_EXPRESSION 379
385 except ParseError as pe: 386 assert pe.error_code == PARSE_INVALID_SYMBOL_SEQUENCE 387
393 except ParseError as pe: 394 assert pe.error_code == PARSE_INVALID_NESTING 395
401 algebra = BooleanAlgebra() 402 assert algebra.TRUE == algebra.TRUE 403 BaseElement()
405 self.assertRaises(TypeError, BaseElement, "a") 406 assert algebra.TRUE is algebra.TRUE 407 assert algebra.TRUE is not algebra.FALSE
406 assert algebra.TRUE is algebra.TRUE 407 assert algebra.TRUE is not algebra.FALSE 408 assert algebra.FALSE is algebra.FALSE
407 assert algebra.TRUE is not algebra.FALSE 408 assert algebra.FALSE is algebra.FALSE 409 assert bool(algebra.TRUE) is True
408 assert algebra.FALSE is algebra.FALSE 409 assert bool(algebra.TRUE) is True 410 assert bool(algebra.FALSE) is False
409 assert bool(algebra.TRUE) is True 410 assert bool(algebra.FALSE) is False 411 assert algebra.TRUE == True
410 assert bool(algebra.FALSE) is False 411 assert algebra.TRUE == True 412 assert algebra.FALSE == False
411 assert algebra.TRUE == True 412 assert algebra.FALSE == False 413
415 algebra = BooleanAlgebra() 416 assert algebra.TRUE.literals == set() 417 assert algebra.FALSE.literals == set()
416 assert algebra.TRUE.literals == set() 417 assert algebra.FALSE.literals == set() 418
420 algebra = BooleanAlgebra() 421 assert algebra.TRUE.literalize() == algebra.TRUE 422 assert algebra.FALSE.literalize() == algebra.FALSE
421 assert algebra.TRUE.literalize() == algebra.TRUE 422 assert algebra.FALSE.literalize() == algebra.FALSE 423
425 algebra = BooleanAlgebra() 426 assert algebra.TRUE.simplify() == algebra.TRUE 427 assert algebra.FALSE.simplify() == algebra.FALSE
426 assert algebra.TRUE.simplify() == algebra.TRUE 427 assert algebra.FALSE.simplify() == algebra.FALSE 428
431 algebra2 = BooleanAlgebra() 432 assert algebra1.TRUE.simplify() == algebra2.TRUE 433 assert algebra1.FALSE.simplify() == algebra2.FALSE
432 assert algebra1.TRUE.simplify() == algebra2.TRUE 433 assert algebra1.FALSE.simplify() == algebra2.FALSE 434
436 algebra = BooleanAlgebra() 437 assert algebra.TRUE.dual == algebra.FALSE 438 assert algebra.FALSE.dual == algebra.TRUE
437 assert algebra.TRUE.dual == algebra.FALSE 438 assert algebra.FALSE.dual == algebra.TRUE 439
441 algebra = BooleanAlgebra() 442 assert algebra.TRUE == algebra.TRUE 443 assert algebra.FALSE == algebra.FALSE
442 assert algebra.TRUE == algebra.TRUE 443 assert algebra.FALSE == algebra.FALSE 444 assert algebra.TRUE != algebra.FALSE
443 assert algebra.FALSE == algebra.FALSE 444 assert algebra.TRUE != algebra.FALSE 445
447 algebra = BooleanAlgebra() 448 assert algebra.FALSE < algebra.TRUE 449 assert algebra.TRUE > algebra.FALSE
448 assert algebra.FALSE < algebra.TRUE 449 assert algebra.TRUE > algebra.FALSE 450
452 algebra = BooleanAlgebra() 453 assert str(algebra.TRUE) == "1" 454 assert str(algebra.FALSE) == "0"
453 assert str(algebra.TRUE) == "1" 454 assert str(algebra.FALSE) == "0" 455 assert repr(algebra.TRUE) == "TRUE"
454 assert str(algebra.FALSE) == "0" 455 assert repr(algebra.TRUE) == "TRUE" 456 assert repr(algebra.FALSE) == "FALSE"
455 assert repr(algebra.TRUE) == "TRUE" 456 assert repr(algebra.FALSE) == "FALSE" 457
468 def test_isliteral(self): 469 assert Symbol(1).isliteral is True 470
473 l2 = Symbol(1) 474 assert l1 in l1.literals 475 assert l1 in l2.literals
474 assert l1 in l1.literals 475 assert l1 in l2.literals 476 assert l2 in l1.literals
475 assert l1 in l2.literals 476 assert l2 in l1.literals 477 assert l2 in l2.literals
476 assert l2 in l1.literals 477 assert l2 in l2.literals 478 self.assertRaises(AttributeError, setattr, l1, "literals", 1)
481 s = Symbol(1) 482 assert s.literalize() == s 483
485 s = Symbol(1) 486 assert s.simplify() == s 487
490 s2 = Symbol(1) 491 assert s1.simplify() == s2.simplify() 492
502 # Test __eq__. 503 assert a == a 504 assert a == a2
503 assert a == a 504 assert a == a2 505 assert not a == c
504 assert a == a2 505 assert not a == c 506 assert not a2 == c
505 assert not a == c 506 assert not a2 == c 507 assert d == d
506 assert not a2 == c 507 assert d == d 508 assert not d == e
507 assert d == d 508 assert not d == e 509 assert not a == d
508 assert not d == e 509 assert not a == d 510 # Test __ne__.
510 # Test __ne__. 511 assert not a != a 512 assert not a != a2
511 assert not a != a 512 assert not a != a2 513 assert a != c
512 assert not a != a2 513 assert a != c 514 assert a2 != c
513 assert a != c 514 assert a2 != c 515
517 S = Symbol
518 assert S("x") < S("y")
519 assert S("y") > S("x")
518 assert S("x") < S("y")
519 assert S("y") > S("x")
520 assert S(1) < S(2)
519 assert S("y") > S("x")
520 assert S(1) < S(2)
521 assert S(2) > S(1)
520 assert S(1) < S(2) 521 assert S(2) > S(1) 522
523 def test_printing(self):
524 assert str(Symbol("a")) == "a"
525 assert str(Symbol(1)) == "1"
524 assert str(Symbol("a")) == "a"
525 assert str(Symbol(1)) == "1"
526 assert repr(Symbol("a")) == "Symbol('a')"
525 assert str(Symbol(1)) == "1"
526 assert repr(Symbol("a")) == "Symbol('a')"
527 assert repr(Symbol(1)) == "Symbol(1)"
526 assert repr(Symbol("a")) == "Symbol('a')"
527 assert repr(Symbol(1)) == "Symbol(1)"
528
535 algebra.NOT(algebra.Symbol("a"))
536 assert (algebra.NOT(algebra.TRUE)).simplify() == algebra.FALSE
537 assert (algebra.NOT(algebra.FALSE)).simplify() == algebra.TRUE
536 assert (algebra.NOT(algebra.TRUE)).simplify() == algebra.FALSE 537 assert (algebra.NOT(algebra.FALSE)).simplify() == algebra.TRUE 538
541 s = algebra.Symbol(1)
542 assert algebra.NOT(s).isliteral
543 assert not algebra.parse("~(a|b)").isliteral
542 assert algebra.NOT(s).isliteral
543 assert not algebra.parse("~(a|b)").isliteral
544
548 l = ~a 549 assert l.isliteral 550 assert l in l.literals
549 assert l.isliteral 550 assert l in l.literals 551 assert len(l.literals) == 1
550 assert l in l.literals 551 assert len(l.literals) == 1 552
553 l = algebra.parse("~(a&a)")
554 assert not l.isliteral
555 assert a in l.literals
554 assert not l.isliteral 555 assert a in l.literals 556 assert len(l.literals) == 1
555 assert a in l.literals 556 assert len(l.literals) == 1 557
558 l = algebra.parse("~(a&a)", simplify=True)
559 assert l.isliteral
560
562 parse = BooleanAlgebra().parse
563 assert parse("~a") == parse("~a").literalize()
564 assert parse("~a|~b") == parse("~(a&b)").literalize()
563 assert parse("~a") == parse("~a").literalize()
564 assert parse("~a|~b") == parse("~(a&b)").literalize()
565 assert parse("~a&~b") == parse("~(a|b)").literalize()
564 assert parse("~a|~b") == parse("~(a&b)").literalize()
565 assert parse("~a&~b") == parse("~(a|b)").literalize()
566
569 a = algebra.Symbol("a")
570 assert ~a == ~a
571 assert algebra.Symbol("a") == algebra.Symbol("a")
570 assert ~a == ~a
571 assert algebra.Symbol("a") == algebra.Symbol("a")
572 assert algebra.parse("~~a") != a
571 assert algebra.Symbol("a") == algebra.Symbol("a")
572 assert algebra.parse("~~a") != a
573 assert (~~a).simplify() == a
572 assert algebra.parse("~~a") != a
573 assert (~~a).simplify() == a
574 assert (~~~a).simplify() == ~a
573 assert (~~a).simplify() == a 574 assert (~~~a).simplify() == ~a 575 assert (~~~~a).simplify() == a
574 assert (~~~a).simplify() == ~a 575 assert (~~~~a).simplify() == a 576 assert (~(a & a & a)).simplify() == (~(a & a & a)).simplify()
575 assert (~~~~a).simplify() == a
576 assert (~(a & a & a)).simplify() == (~(a & a & a)).simplify()
577 assert algebra.parse("~~a", simplify=True) == a
576 assert (~(a & a & a)).simplify() == (~(a & a & a)).simplify()
577 assert algebra.parse("~~a", simplify=True) == a
578 algebra2 = BooleanAlgebra()
578 algebra2 = BooleanAlgebra()
579 assert algebra2.parse("~~a", simplify=True) == a
580
583 a = algebra.Symbol("a")
584 assert (~a).cancel() == ~a
585 assert algebra.parse("~~a").cancel() == a
584 assert (~a).cancel() == ~a
585 assert algebra.parse("~~a").cancel() == a
586 assert algebra.parse("~~~a").cancel() == ~a
585 assert algebra.parse("~~a").cancel() == a
586 assert algebra.parse("~~~a").cancel() == ~a
587 assert algebra.parse("~~~~a").cancel() == a
586 assert algebra.parse("~~~a").cancel() == ~a
587 assert algebra.parse("~~~~a").cancel() == a
588
593 c = algebra.Symbol("c")
594 assert algebra.parse("~(a&b)").demorgan() == ~a | ~b
595 assert algebra.parse("~(a|b|c)").demorgan() == algebra.parse("~a&~b&~c")
594 assert algebra.parse("~(a&b)").demorgan() == ~a | ~b
595 assert algebra.parse("~(a|b|c)").demorgan() == algebra.parse("~a&~b&~c")
596 assert algebra.parse("~(~a&b)").demorgan() == a | ~b
595 assert algebra.parse("~(a|b|c)").demorgan() == algebra.parse("~a&~b&~c")
596 assert algebra.parse("~(~a&b)").demorgan() == a | ~b
597 assert (~~(a & b | c)).demorgan() == a & b | c
596 assert algebra.parse("~(~a&b)").demorgan() == a | ~b
597 assert (~~(a & b | c)).demorgan() == a & b | c
598 assert (~~~(a & b | c)).demorgan() == ~(a & b) & ~c
597 assert (~~(a & b | c)).demorgan() == a & b | c
598 assert (~~~(a & b | c)).demorgan() == ~(a & b) & ~c
599 assert algebra.parse("~" * 10 + "(a&b|c)").demorgan() == a & b | c
598 assert (~~~(a & b | c)).demorgan() == ~(a & b) & ~c
599 assert algebra.parse("~" * 10 + "(a&b|c)").demorgan() == a & b | c
600 assert algebra.parse("~" * 11 + "(a&b|c)").demorgan() == (~(a & b | c)).demorgan()
599 assert algebra.parse("~" * 10 + "(a&b|c)").demorgan() == a & b | c
600 assert algebra.parse("~" * 11 + "(a&b|c)").demorgan() == (~(a & b | c)).demorgan()
601 _0 = algebra.FALSE
602 _1 = algebra.TRUE 603 assert (~(_0)).demorgan() == _1 604 assert (~(_1)).demorgan() == _0
603 assert (~(_0)).demorgan() == _1 604 assert (~(_1)).demorgan() == _0 605
609 y = algebra.Symbol(2) 610 assert x < ~x 611 assert ~x > x
610 assert x < ~x 611 assert ~x > x 612 assert ~x < y
611 assert ~x > x 612 assert ~x < y 613 assert y > ~x
612 assert ~x < y 613 assert y > ~x 614
617 a = algebra.Symbol("a")
618 assert str(~a) == "~a"
619 assert repr(~a) == "NOT(Symbol('a'))"
618 assert str(~a) == "~a"
619 assert repr(~a) == "NOT(Symbol('a'))"
620 expr = algebra.parse("~(a&a)")
620 expr = algebra.parse("~(a&a)")
621 assert str(expr) == "~(a&a)"
622 assert repr(expr), "NOT(AND(Symbol('a') == Symbol('a')))"
621 assert str(expr) == "~(a&a)"
622 assert repr(expr), "NOT(AND(Symbol('a') == Symbol('a')))"
623
639 for term in (t1, t2, t3, t4): 640 assert isinstance(term, DualBase) 641
648 649 assert not t1.isliteral 650 assert not t2.isliteral
649 assert not t1.isliteral 650 assert not t2.isliteral 651
661 for term in (t1, t2, t3, t4): 662 assert a in term.literals 663 for term in (t1, t2, t4):
663 for term in (t1, t2, t4): 664 assert b in term.literals 665 for term in (t2, t4):
665 for term in (t2, t4): 666 assert c in term.literals 667
669 parse = BooleanAlgebra().parse
670 assert parse("a|~(b|c)").literalize() == parse("a|(~b&~c)")
671
673 algebra = BooleanAlgebra()
674 assert algebra.parse("a&a").annihilator == algebra.FALSE
675 assert algebra.parse("a|a").annihilator == algebra.TRUE
674 assert algebra.parse("a&a").annihilator == algebra.FALSE
675 assert algebra.parse("a|a").annihilator == algebra.TRUE
676
678 algebra = BooleanAlgebra()
679 assert algebra.parse("a|b").identity == algebra.FALSE
680 assert algebra.parse("a&b").identity == algebra.TRUE
679 assert algebra.parse("a|b").identity == algebra.FALSE
680 assert algebra.parse("a&b").identity == algebra.TRUE
681
683 algebra = BooleanAlgebra()
684 assert algebra.AND(algebra.Symbol("a"), algebra.Symbol("b")).dual == algebra.OR
685 assert algebra.OR(algebra.Symbol("a"), algebra.Symbol("b")).dual == algebra.AND
684 assert algebra.AND(algebra.Symbol("a"), algebra.Symbol("b")).dual == algebra.OR
685 assert algebra.OR(algebra.Symbol("a"), algebra.Symbol("b")).dual == algebra.AND
686
686
687 assert algebra.parse("a|b").dual == algebra.AND
688 assert algebra.parse("a&b").dual == algebra.OR
687 assert algebra.parse("a|b").dual == algebra.AND
688 assert algebra.parse("a&b").dual == algebra.OR
689
699 # Idempotence 700 assert (a & a).simplify() == a 701 # Idempotence + Associativity
701 # Idempotence + Associativity 702 assert (a | (a | b)).simplify() == a | b 703 # Annihilation
703 # Annihilation 704 assert (a & _0).simplify() == _0 705 assert (a | _1).simplify() == _1
704 assert (a & _0).simplify() == _0 705 assert (a | _1).simplify() == _1 706 # Identity
706 # Identity 707 assert (a & _1).simplify() == a 708 assert (a | _0).simplify() == a
707 assert (a & _1).simplify() == a 708 assert (a | _0).simplify() == a 709 # Complementation
709 # Complementation 710 assert (a & ~a).simplify() == _0 711 assert (a | ~a).simplify() == _1
710 assert (a & ~a).simplify() == _0 711 assert (a | ~a).simplify() == _1 712 # Absorption
712 # Absorption 713 assert (a & (a | b)).simplify() == a 714 assert (a | (a & b)).simplify() == a
713 assert (a & (a | b)).simplify() == a 714 assert (a | (a & b)).simplify() == a 715 assert ((b & a) | (b & a & c)).simplify() == b & a
714 assert (a | (a & b)).simplify() == a 715 assert ((b & a) | (b & a & c)).simplify() == b & a 716
717 # Elimination 718 assert ((a & ~b) | (a & b)).simplify() == a 719
722 unsorted_expression = (b & b & a).simplify(sort=False) 723 assert unsorted_expression == sorted_expression 724 assert sorted_expression.pretty() != unsorted_expression.pretty()
723 assert unsorted_expression == sorted_expression 724 assert sorted_expression.pretty() != unsorted_expression.pretty() 725
727 unsorted_expression = (b | b | a).simplify(sort=False) 728 assert unsorted_expression == sorted_expression 729 assert sorted_expression.pretty() != unsorted_expression.pretty()
728 assert unsorted_expression == sorted_expression 729 assert sorted_expression.pretty() != unsorted_expression.pretty() 730
732 result = algebra1.parse("(~a&b&c) | (a&~b&c) | (a&b&~c) | (a&b&c)", simplify=True)
733 assert result == expected
734
736 result = algebra2.parse("(~a&b&c) | (a&~b&c) | (a&b&~c) | (a&b&c)", simplify=True)
737 assert result == expected
738
740 result = algebra1.parse("(a&b&c&d) | (b&d)", simplify=True)
741 assert result == expected
742
744 result = algebra2.parse("(a&b&c&d) | (b&d)", simplify=True)
745 assert result == expected
746
752 ) 753 assert result.pretty() == expected.pretty() 754
760 ) 761 assert result.pretty() == expected.pretty() 762
779 780 assert len(result_original) == 2 781 assert len(result_swapped) == 2
780 assert len(result_original) == 2 781 assert len(result_swapped) == 2 782 assert result_original[0] == result_swapped[1]
781 assert len(result_swapped) == 2 782 assert result_original[0] == result_swapped[1] 783 assert result_original[1] == result_swapped[0]
782 assert result_original[0] == result_swapped[1] 783 assert result_original[1] == result_swapped[0] 784
799 # vs. expression built from an object tree vs. expression built from a parse 800 assert parsed.pretty() == test_expression.pretty() 801 assert parsed == test_expression
800 assert parsed.pretty() == test_expression.pretty() 801 assert parsed == test_expression 802
834 # vs. expression built from an object tree vs. expression built from a parse 835 assert parsed.pretty() == test_expression.pretty() 836
866 867 assert parsed.pretty() == test_expression.pretty() 868
901 parsed = parse(test_expression_str) 902 assert test_expression_str == str(parsed) 903
904 expected = (a & ~b & d) | (~a & b) | (~a & ~c) | (b & c & d) 905 assert test_expression.simplify().pretty() == expected.pretty() 906
911 # vs. expression built from an object tree vs. expression built from a parse 912 assert parsed.simplify().pretty() == expected.simplify().pretty() 913
914 expected_str = "(a&~b&d)|(~a&b)|(~a&~c)|(b&c&d)" 915 assert str(parsed) == expected_str 916
917 parsed2 = parse(test_expression_str) 918 assert parsed2.simplify().pretty() == expected.pretty() 919
919 920 assert str(parsed2.simplify()) == expected_str 921
983 result = result.simplify() 984 assert result == expected 985
992 993 assert context.exception.error_code == PARSE_INVALID_NESTING 994
1000 result = parse("b")
1001 assert expr.subtract(p1, simplify=True) == expr
1002 assert expr.subtract(p2, simplify=True) == result
1001 assert expr.subtract(p1, simplify=True) == expr 1002 assert expr.subtract(p2, simplify=True) == result 1003
1008 t2 = parse("a&b&c")
1009 assert t1 != t2
1010 assert t1.flatten() == t2
1009 assert t1 != t2 1010 assert t1.flatten() == t2 1011
1013 t2 = parse("a | (b&c) | (a&c) | b")
1014 assert t1 != t2
1015 assert t1.flatten() == t2
1014 assert t1 != t2 1015 assert t1.flatten() == t2 1016
1023 e = algebra.Symbol("e")
1024 assert (a & (b | c)).distributive() == (a & b) | (a & c)
1025 t1 = algebra.AND(a, (b | c), (d | e))
1028 ) 1029 assert t1.distributive() == t2 1030
1041 # Test __eq__. 1042 assert t1 == t1 1043 assert t1_2 == t1
1042 assert t1 == t1 1043 assert t1_2 == t1 1044 assert t2_2 == t2
1043 assert t1_2 == t1 1044 assert t2_2 == t2 1045 assert not t1 == t2
1044 assert t2_2 == t2 1045 assert not t1 == t2 1046 assert not t1 == 1
1045 assert not t1 == t2 1046 assert not t1 == 1 1047 assert not t1 is True
1046 assert not t1 == 1 1047 assert not t1 is True 1048 assert not t1 is None
1047 assert not t1 is True 1048 assert not t1 is None 1049
1050 # Test __ne__. 1051 assert not t1 != t1 1052 assert not t1_2 != t1
1051 assert not t1 != t1 1052 assert not t1_2 != t1 1053 assert not t2_2 != t2
1052 assert not t1_2 != t1 1053 assert not t2_2 != t2 1054 assert t1 != t2
1053 assert not t2_2 != t2 1054 assert t1 != t2 1055 assert t1 != 1
1054 assert t1 != t2 1055 assert t1 != 1 1056 assert t1 is not True
1055 assert t1 != 1 1056 assert t1 is not True 1057 assert t1 is not None
1056 assert t1 is not True 1057 assert t1 is not None 1058
1061 x, y, z = algebra.Symbol(1), algebra.Symbol(2), algebra.Symbol(3) 1062 assert algebra.AND(x, y) < algebra.AND(x, y, z) 1063 assert not algebra.AND(x, y) > algebra.AND(x, y, z)
1062 assert algebra.AND(x, y) < algebra.AND(x, y, z) 1063 assert not algebra.AND(x, y) > algebra.AND(x, y, z) 1064 assert algebra.AND(x, y) < algebra.AND(x, z)
1063 assert not algebra.AND(x, y) > algebra.AND(x, y, z) 1064 assert algebra.AND(x, y) < algebra.AND(x, z) 1065 assert not algebra.AND(x, y) > algebra.AND(x, z)
1064 assert algebra.AND(x, y) < algebra.AND(x, z) 1065 assert not algebra.AND(x, y) > algebra.AND(x, z) 1066 assert algebra.AND(x, y) < algebra.AND(y, z)
1065 assert not algebra.AND(x, y) > algebra.AND(x, z) 1066 assert algebra.AND(x, y) < algebra.AND(y, z) 1067 assert not algebra.AND(x, y) > algebra.AND(y, z)
1066 assert algebra.AND(x, y) < algebra.AND(y, z) 1067 assert not algebra.AND(x, y) > algebra.AND(y, z) 1068 assert not algebra.AND(x, y) < algebra.AND(x, y)
1067 assert not algebra.AND(x, y) > algebra.AND(y, z) 1068 assert not algebra.AND(x, y) < algebra.AND(x, y) 1069 assert not algebra.AND(x, y) > algebra.AND(x, y)
1068 assert not algebra.AND(x, y) < algebra.AND(x, y) 1069 assert not algebra.AND(x, y) > algebra.AND(x, y) 1070
1072 parse = BooleanAlgebra().parse
1073 assert str(parse("a&a")) == "a&a"
1074 assert repr(parse("a&a")), "AND(Symbol('a') == Symbol('a'))"
1073 assert str(parse("a&a")) == "a&a"
1074 assert repr(parse("a&a")), "AND(Symbol('a') == Symbol('a'))"
1075 assert str(parse("a|a")) == "a|a"
1074 assert repr(parse("a&a")), "AND(Symbol('a') == Symbol('a'))"
1075 assert str(parse("a|a")) == "a|a"
1076 assert repr(parse("a|a")), "OR(Symbol('a') == Symbol('a'))"
1075 assert str(parse("a|a")) == "a|a"
1076 assert repr(parse("a|a")), "OR(Symbol('a') == Symbol('a'))"
1077 assert str(parse("(a|b)&c")) == "(a|b)&c"
1076 assert repr(parse("a|a")), "OR(Symbol('a') == Symbol('a'))"
1077 assert str(parse("(a|b)&c")) == "(a|b)&c"
1078 assert repr(parse("(a|b)&c")), "AND(OR(Symbol('a'), Symbol('b')) == Symbol('c'))"
1077 assert str(parse("(a|b)&c")) == "(a|b)&c"
1078 assert repr(parse("(a|b)&c")), "AND(OR(Symbol('a'), Symbol('b')) == Symbol('c'))"
1079
1094 for case2 in order[j]: 1095 assert case1 < case2 1096 assert case2 > case1
1095 assert case1 < case2 1096 assert case2 > case1 1097
1100 a, b, c = algebra.Symbol("a"), algebra.Symbol("b"), algebra.Symbol("c")
1101 assert algebra.parse("0") == algebra.FALSE
1102 assert algebra.parse("(0)") == algebra.FALSE
1101 assert algebra.parse("0") == algebra.FALSE
1102 assert algebra.parse("(0)") == algebra.FALSE
1103 assert algebra.parse("1") == algebra.TRUE
1102 assert algebra.parse("(0)") == algebra.FALSE
1103 assert algebra.parse("1") == algebra.TRUE
1104 assert algebra.parse("(1)") == algebra.TRUE
1103 assert algebra.parse("1") == algebra.TRUE
1104 assert algebra.parse("(1)") == algebra.TRUE
1105 assert algebra.parse("a") == a
1104 assert algebra.parse("(1)") == algebra.TRUE
1105 assert algebra.parse("a") == a
1106 assert algebra.parse("(a)") == a
1105 assert algebra.parse("a") == a
1106 assert algebra.parse("(a)") == a
1107 assert algebra.parse("(a)") == a
1106 assert algebra.parse("(a)") == a
1107 assert algebra.parse("(a)") == a
1108 assert algebra.parse("~a") == algebra.parse("~(a)")
1107 assert algebra.parse("(a)") == a
1108 assert algebra.parse("~a") == algebra.parse("~(a)")
1109 assert algebra.parse("~(a)") == algebra.parse("(~a)")
1108 assert algebra.parse("~a") == algebra.parse("~(a)")
1109 assert algebra.parse("~(a)") == algebra.parse("(~a)")
1110 assert algebra.parse("~a") == ~a
1109 assert algebra.parse("~(a)") == algebra.parse("(~a)")
1110 assert algebra.parse("~a") == ~a
1111 assert algebra.parse("(~a)") == ~a
1110 assert algebra.parse("~a") == ~a
1111 assert algebra.parse("(~a)") == ~a
1112 assert algebra.parse("~~a", simplify=True) == (~~a).simplify()
1111 assert algebra.parse("(~a)") == ~a
1112 assert algebra.parse("~~a", simplify=True) == (~~a).simplify()
1113 assert algebra.parse("a&b") == a & b
1112 assert algebra.parse("~~a", simplify=True) == (~~a).simplify()
1113 assert algebra.parse("a&b") == a & b
1114 assert algebra.parse("~a&b") == ~a & b
1113 assert algebra.parse("a&b") == a & b
1114 assert algebra.parse("~a&b") == ~a & b
1115 assert algebra.parse("a&~b") == a & ~b
1114 assert algebra.parse("~a&b") == ~a & b
1115 assert algebra.parse("a&~b") == a & ~b
1116 assert algebra.parse("a&b&c") == algebra.parse("a&b&c")
1115 assert algebra.parse("a&~b") == a & ~b
1116 assert algebra.parse("a&b&c") == algebra.parse("a&b&c")
1117 assert algebra.parse("a&b&c") == algebra.AND(a, b, c)
1116 assert algebra.parse("a&b&c") == algebra.parse("a&b&c")
1117 assert algebra.parse("a&b&c") == algebra.AND(a, b, c)
1118 assert algebra.parse("~a&~b&~c") == algebra.parse("~a&~b&~c")
1117 assert algebra.parse("a&b&c") == algebra.AND(a, b, c)
1118 assert algebra.parse("~a&~b&~c") == algebra.parse("~a&~b&~c")
1119 assert algebra.parse("~a&~b&~c") == algebra.AND(~a, ~b, ~c)
1118 assert algebra.parse("~a&~b&~c") == algebra.parse("~a&~b&~c")
1119 assert algebra.parse("~a&~b&~c") == algebra.AND(~a, ~b, ~c)
1120 assert algebra.parse("a|b") == a | b
1119 assert algebra.parse("~a&~b&~c") == algebra.AND(~a, ~b, ~c)
1120 assert algebra.parse("a|b") == a | b
1121 assert algebra.parse("~a|b") == ~a | b
1120 assert algebra.parse("a|b") == a | b
1121 assert algebra.parse("~a|b") == ~a | b
1122 assert algebra.parse("a|~b") == a | ~b
1121 assert algebra.parse("~a|b") == ~a | b
1122 assert algebra.parse("a|~b") == a | ~b
1123 assert algebra.parse("a|b|c") == algebra.parse("a|b|c")
1122 assert algebra.parse("a|~b") == a | ~b
1123 assert algebra.parse("a|b|c") == algebra.parse("a|b|c")
1124 assert algebra.parse("a|b|c") == algebra.OR(a, b, c)
1123 assert algebra.parse("a|b|c") == algebra.parse("a|b|c")
1124 assert algebra.parse("a|b|c") == algebra.OR(a, b, c)
1125 assert algebra.parse("~a|~b|~c") == algebra.OR(~a, ~b, ~c)
1124 assert algebra.parse("a|b|c") == algebra.OR(a, b, c)
1125 assert algebra.parse("~a|~b|~c") == algebra.OR(~a, ~b, ~c)
1126 assert algebra.parse("(a|b)") == a | b
1125 assert algebra.parse("~a|~b|~c") == algebra.OR(~a, ~b, ~c)
1126 assert algebra.parse("(a|b)") == a | b
1127 assert algebra.parse("a&(a|b)", simplify=True) == (a & (a | b)).simplify()
1126 assert algebra.parse("(a|b)") == a | b
1127 assert algebra.parse("a&(a|b)", simplify=True) == (a & (a | b)).simplify()
1128 assert algebra.parse("a&(a|~b)", simplify=True) == (a & (a | ~b)).simplify()
1127 assert algebra.parse("a&(a|b)", simplify=True) == (a & (a | b)).simplify()
1128 assert algebra.parse("a&(a|~b)", simplify=True) == (a & (a | ~b)).simplify()
1129 assert (
1128 assert algebra.parse("a&(a|~b)", simplify=True) == (a & (a | ~b)).simplify()
1129 assert (
1130 algebra.parse("(a&b)|(b&((c|a)&(b|(c&a))))", simplify=True)
1131 == ((a & b) | (b & ((c | a) & (b | (c & a))))).simplify()
1132 )
1133 assert algebra.parse("(a&b)|(b&((c|a)&(b|(c&a))))", simplify=True) == algebra.parse(
1132 )
1133 assert algebra.parse("(a&b)|(b&((c|a)&(b|(c&a))))", simplify=True) == algebra.parse(
1134 "a&b | b&(c|a)&(b|c&a)", simplify=True
1135 )
1136 assert algebra.Symbol("1abc") == algebra.parse("1abc")
1135 )
1136 assert algebra.Symbol("1abc") == algebra.parse("1abc")
1137 assert algebra.Symbol("_abc") == algebra.parse("_abc")
1136 assert algebra.Symbol("1abc") == algebra.parse("1abc")
1137 assert algebra.Symbol("_abc") == algebra.parse("_abc")
1138
1142 expr = a & b | c
1143 assert expr.subs({a: b}).simplify() == b | c
1144 assert expr.subs({a: a}).simplify() == expr
1143 assert expr.subs({a: b}).simplify() == b | c
1144 assert expr.subs({a: a}).simplify() == expr
1145 assert expr.subs({a: b | c}).simplify() == algebra.parse("(b|c)&b|c").simplify()
1144 assert expr.subs({a: a}).simplify() == expr
1145 assert expr.subs({a: b | c}).simplify() == algebra.parse("(b|c)&b|c").simplify()
1146 assert expr.subs({a & b: a}).simplify() == a | c
1145 assert expr.subs({a: b | c}).simplify() == algebra.parse("(b|c)&b|c").simplify()
1146 assert expr.subs({a & b: a}).simplify() == a | c
1147 assert expr.subs({c: algebra.TRUE}).simplify() == algebra.TRUE
1146 assert expr.subs({a & b: a}).simplify() == a | c
1147 assert expr.subs({c: algebra.TRUE}).simplify() == algebra.TRUE
1148
1152 expr = a & b | c
1153 assert expr.subs({}, default=algebra.TRUE).simplify() == algebra.TRUE
1154 assert (
1153 assert expr.subs({}, default=algebra.TRUE).simplify() == algebra.TRUE
1154 assert (
1155 expr.subs({a: algebra.FALSE, c: algebra.FALSE}, default=algebra.TRUE).simplify()
1156 == algebra.FALSE
1157 )
1158 assert algebra.TRUE.subs({}, default=algebra.FALSE).simplify() == algebra.TRUE
1157 )
1158 assert algebra.TRUE.subs({}, default=algebra.FALSE).simplify() == algebra.TRUE
1159 assert algebra.FALSE.subs({}, default=algebra.TRUE).simplify() == algebra.FALSE
1158 assert algebra.TRUE.subs({}, default=algebra.FALSE).simplify() == algebra.TRUE
1159 assert algebra.FALSE.subs({}, default=algebra.TRUE).simplify() == algebra.FALSE
1160
1164 expr = algebra.parse("a&b")
1165 assert algebra.dnf(expr) == expr
1166 assert algebra.cnf(expr) == expr
1165 assert algebra.dnf(expr) == expr 1166 assert algebra.cnf(expr) == expr 1167
1168 expr = algebra.parse("a|b")
1169 assert algebra.dnf(expr) == expr
1170 assert algebra.cnf(expr) == expr
1169 assert algebra.dnf(expr) == expr 1170 assert algebra.cnf(expr) == expr 1171
1174 result_cnf = algebra.parse("b&(a|c)")
1175 assert algebra.dnf(expr) == result_dnf
1176 assert algebra.cnf(expr) == result_cnf
1175 assert algebra.dnf(expr) == result_dnf 1176 assert algebra.cnf(expr) == result_cnf 1177
1180 result_cnf = algebra.parse("(a|b)&(b|c)")
1181 assert algebra.dnf(expr) == result_dnf
1182 assert algebra.cnf(expr) == result_cnf
1181 assert algebra.dnf(expr) == result_dnf 1182 assert algebra.cnf(expr) == result_cnf 1183
1186 expected = algebra.parse("(a|s)&(b|e|s)&(c|d|e)&(c|e|s)&(d|s)")
1187 assert expected == result
1188
1191 exp = alg.parse("a and b or a and c")
1192 assert [
1193 alg.Symbol("a"),
1194 alg.Symbol("b"),
1195 alg.Symbol("a"),
1196 alg.Symbol("c"),
1197 ] == exp.get_literals()
1198
1201 exp = alg.parse("a and b or True and a and c")
1202 assert [
1203 alg.Symbol("a"),
1204 alg.Symbol("b"),
1205 alg.Symbol("a"),
1206 alg.Symbol("c"),
1207 ] == exp.get_symbols()
1208
1211 exp = alg.parse("a and b or a and c")
1212 assert set([alg.Symbol("a"), alg.Symbol("b"), alg.Symbol("c")]) == exp.literals
1213
1216 exp = alg.parse("a and not b and not not c")
1217 assert set([alg.Symbol("a"), alg.parse("not b"), alg.parse("not c")]) == exp.literals
1218
1221 exp = alg.parse("a and not b and not not c")
1222 assert set([alg.Symbol("a"), alg.Symbol("b"), alg.Symbol("c")]) == exp.symbols
1223
1226 exp = alg.parse("a and b or a and c")
1227 assert set(["a", "b", "c"]) == exp.objects
1228
1272 cnf = algebra.cnf(expr) 1273 assert str(cnf) == "a&c&f&g" 1274 # We should get exactly this count of calls.
1275 # before we had a combinatorial explosion
1276 assert counts == {"CountingAnd": 44, "CountingNot": 193, "CountingOr": 2490}
1277
1289 result = result.simplify() 1290 assert result == algebra.TRUE 1291
1293 result = result.simplify() 1294 assert result == algebra.TRUE 1295
1316 for c in [True, False]: 1317 assert exp(a=a, b=b, c=c) == (a and b and c) 1318
1324 for c in [True, False]: 1325 assert exp(a=a, b=b, c=c) == (a or b or c) 1326
1330 for a in [True, False]: 1331 assert exp(a=a) == (not a) 1332
1336 for a in [True, False]: 1337 assert exp(a=a) == a 1338
1344 for c in [True, False]: 1345 assert exp(a=a, b=b, c=c) == (not (a or b and (a or not c))) 1346
1351 for b in [True, False]: 1352 assert exp(a=a, b=b) == (not (a or b))
155 if request.method in self.invalidating_methods and resp.ok: 156 assert request.url is not None 157 cache_url = self.controller.cache_url(request.url)
44 match = URI.match(uri) 45 assert match is not None 46 groups = match.groups()
152 cache_url = request.url 153 assert cache_url is not None 154 cache_data = self.cache.get(cache_url)
173 """ 174 assert request.url is not None 175 cache_url = self.cache_url(request.url)
221 time_tuple = parsedate_tz(headers["date"]) 222 assert time_tuple is not None 223 date = calendar.timegm(time_tuple[:6])
360 time_tuple = parsedate_tz(response_headers["date"]) 361 assert time_tuple is not None 362 date = calendar.timegm(time_tuple[:6])
380 381 assert request.url is not None 382 cache_url = self.cache_url(request.url)
432 time_tuple = parsedate_tz(response_headers["date"]) 433 assert time_tuple is not None 434 date = calendar.timegm(time_tuple[:6])
479 """ 480 assert request.url is not None 481 cache_url = self.cache_url(request.url)
139 time_tuple = parsedate_tz(headers["date"]) 140 assert time_tuple is not None 141 date = calendar.timegm(time_tuple[:6])
433 def _get_cached_btype(self, type): 434 assert self._lock.acquire(False) is False 435 # call me with the lock!
788 BField, bitsize = name2fieldtype[fname] 789 assert bitsize < 0, \ 790 "not implemented: initializer with bit fields" 791 offset = CTypesStructOrUnion._offsetof(fname)
863 import traceback 864 assert not has_varargs, "varargs not supported for callbacks" 865 if getattr(BResult, '_ctype', None) is not None:
923 if has_varargs: 924 assert len(args) >= len(BArgs) 925 extraargs = args[len(BArgs):]
927 else: 928 assert len(args) == len(BArgs) 929 ctypes_args = []
949 def new_enum_type(self, name, enumerators, enumvalues, CTypesInt): 950 assert isinstance(name, str) 951 reverse_mapping = dict(zip(reversed(enumvalues),
990 else: 991 assert issubclass(cdata_or_BType, CTypesData) 992 return cdata_or_BType._get_size()
994 def alignof(self, BType): 995 assert issubclass(BType, CTypesData) 996 return BType._alignment()
1006 def callback(self, BType, source, error, onerror): 1007 assert onerror is None # XXX not implemented 1008 return BType(source, error)
1044 new_cdata = self.cast(self.typeof(cdata), cdata) 1045 assert new_cdata is not cdata 1046 weak_cache[MyRef(new_cdata, remove)] = (cdata, destructor)
9 if self.op is None: 10 assert isinstance(self.arg, str) 11 return '(_cffi_opcode_t)(%s)' % (self.arg,)
47 48 assert isinstance(result, model.BaseTypeByIdentity) 49 _CACHE[commontype] = result, quals
236 p2 = csource.find('...', p, match.end())
237 assert p2 > p
238 csource = '%s,__dotdotdot%d__ %s' % (csource[:p], number,
240 else: 241 assert csource[p:p+3] == '...' 242 csource = '%s __dotdotdot%d__ %s' % (csource[:p], number,
404 else: 405 assert 0 406 current_decl = None
512 tp, quals = self._get_type_and_quals(node, name=decl.name) 513 assert isinstance(tp, model.RawFunctionType) 514 self._declare_function(tp, quals, decl)
564 ast, macros = self._parse('void __dummy(\n%s\n);' % cdecl)[:2]
565 assert not macros
566 exprnode = ast.ext[-1].type.args.params[0]
579 "try cdef(xx, override=True))" % (name,)) 580 assert '__dotdotdot__' not in name.split() 581 self._declarations[name] = (obj, quals)
30 result = self.c_name_with_marker
31 assert result.count('&') == 1
32 # some logic duplication with ffi.getctype()... :-(
61 BType2 = ffi._cached_btypes.setdefault(self, BType) 62 assert BType2 is BType 63 return BType
160 def __init__(self, name): 161 assert name in self.ALL_PRIMITIVE_TYPES 162 self.name = name
340 def get_official_name(self):
341 assert self.c_name_with_marker.endswith('&')
342 return self.c_name_with_marker[:-1]
450 if isinstance(ftype, ArrayType) and ftype.length is None: 451 assert fsize == 0 452 else:
591 key = kwds.pop('key', (funcname, args))
592 assert not kwds
593 try:
1 # pkg-config, https://www.freedesktop.org/wiki/Software/pkg-config/ integration for cffi 2 import sys, os, subprocess 3
32 try: 33 pc = subprocess.Popen(a, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 34 except EnvironmentError as e:
40 berr = berr.decode(encoding) 41 except Exception: 42 pass 43 raise PkgConfigError(berr.strip())
77 def as_python_expr(self): 78 flags = eval(self.flags, G_FLAGS) 79 fields_expr = [c_field.as_field_python_expr()
147 if tp.is_raw_function: 148 assert self._typesdict[tp] is None 149 self._typesdict[tp] = len(self.cffi_types)
151 for tp1 in tp.args: 152 assert isinstance(tp1, (model.VoidType, 153 model.BasePrimitiveType, 154 model.PointerType, 155 model.StructOrUnionOrEnum, 156 model.FunctionPtrType)) 157 if self._typesdict[tp1] is None:
168 self.cffi_types.append('LEN') # placeholder
169 assert None not in self._typesdict.values()
170 #
192 for op in self.cffi_types: 193 assert isinstance(op, CffiOp) 194 self.cffi_types = tuple(self.cffi_types) # don't change any more
265 for tp, i in self._struct_unions.items(): 266 assert i < len(lst) 267 assert lst[i].name == tp.name
266 assert i < len(lst) 267 assert lst[i].name == tp.name 268 assert len(lst) == len(self._struct_unions)
267 assert lst[i].name == tp.name 268 assert len(lst) == len(self._struct_unions) 269 # same with enums
271 for tp, i in self._enums.items(): 272 assert i < len(lst) 273 assert lst[i].name == tp.name
272 assert i < len(lst) 273 assert lst[i].name == tp.name 274 assert len(lst) == len(self._enums)
273 assert lst[i].name == tp.name 274 assert len(lst) == len(self._enums) 275
282 if self.target_is_python: 283 assert preamble is None 284 self.write_py_source_to_f(f)
285 else: 286 assert preamble is not None 287 self.write_c_source_to_f(f, preamble)
665 def _generate_cpy_function_decl(self, tp, name): 666 assert not self.target_is_python 667 assert isinstance(tp, model.FunctionPtrType)
666 assert not self.target_is_python 667 assert isinstance(tp, model.FunctionPtrType) 668 if tp.ellipsis:
1084 else:
1085 assert check_value is None
1086 prnt('static void %s(char *o)' % funcname)
1226 def _generate_cpy_extern_python_collecttype(self, tp, name): 1227 assert isinstance(tp, model.FunctionPtrType) 1228 self._do_collect_type(tp)
1398 else: 1399 assert self.cffi_types[index + 1] == 'LEN' 1400 self.cffi_types[index] = CffiOp(OP_ARRAY, item_index)
1457 def make_c_source(ffi, module_name, preamble, target_c_file, verbose=False): 1458 assert preamble is not None 1459 return _make_c_or_py_source(ffi, module_name, preamble, target_c_file,
25 code = compile(src, filename, 'exec') 26 exec(code, glob, glob) 27
223 def cffi_modules(dist, attr, value): 224 assert attr == 'cffi_modules' 225 if isinstance(value, basestring):
344 def _generate_cpy_function_collecttype(self, tp, name): 345 assert isinstance(tp, model.FunctionPtrType) 346 if tp.ellipsis:
355 def _generate_cpy_function_decl(self, tp, name): 356 assert isinstance(tp, model.FunctionPtrType) 357 if tp.ellipsis:
460 def _generate_cpy_struct_decl(self, tp, name): 461 assert name == tp.name 462 self._generate_struct_or_union_decl(tp, 'struct', name)
471 def _generate_cpy_union_decl(self, tp, name): 472 assert name == tp.name 473 self._generate_struct_or_union_decl(tp, 'union', name)
553 tp.force_flatten() 554 assert len(fieldofs) == len(fieldsize) == len(tp.fldnames) 555 tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment
587 i += 2 588 assert i == len(layout) 589
632 else: 633 assert category == 'const' 634 #
645 'variable type'),)) 646 assert delayed 647 else:
784 if tp.length_is_unknown(): 785 assert isinstance(value, tuple) 786 (value, size) = value
140 def _generate_gen_function_decl(self, tp, name): 141 assert isinstance(tp, model.FunctionPtrType) 142 if tp.ellipsis:
187 def _loaded_gen_function(self, tp, name, module, library): 188 assert isinstance(tp, model.FunctionPtrType) 189 if tp.ellipsis:
242 def _generate_gen_struct_decl(self, tp, name): 243 assert name == tp.name 244 self._generate_struct_or_union_decl(tp, 'struct', name)
252 def _generate_gen_union_decl(self, tp, name): 253 assert name == tp.name 254 self._generate_struct_or_union_decl(tp, 'union', name)
332 tp.force_flatten() 333 assert len(fieldofs) == len(fieldsize) == len(tp.fldnames) 334 tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment
366 i += 2 367 assert i == len(layout) 368
399 if check_value is not None: 400 assert is_int 401 assert category == 'const'
400 assert is_int
401 assert category == 'const'
402 prnt('int %s(char *out_error)' % funcname)
407 elif is_int:
408 assert category == 'const'
409 prnt('int %s(long long *out_value)' % funcname)
414 else: 415 assert tp is not None 416 assert check_value is None
415 assert tp is not None 416 assert check_value is None 417 if category == 'var':
439 if check_value is not None: 440 assert is_int 441 self._load_known_int_constant(module, funcname)
453 else: 454 assert check_value is None 455 fntypeextra = '(*)(void)'
211 def _load_library(self): 212 assert self._has_module 213 if self.flags is not None:
73 self.detach() 74 except Exception: 75 pass 76
166 return False 167 except Exception: 168 pass 169 return default
428 os.path.dirname(filename),
429 f".__atomic-write{random.randrange(1 << 32):08x}",
430 )
551 _ansi_stream_wrappers[stream] = rv 552 except Exception: 553 pass 554
599 cache[stream] = rv 600 except Exception: 601 pass 602 return rv
438 439 import subprocess 440
455 [str(cmd_path)] + cmd_params, 456 shell=False, 457 stdin=subprocess.PIPE, 458 env=env, 459 errors="replace", 460 text=True, 461 ) 462 assert c.stdin is not None 463 try: 464 for text in generator:
461 ) 462 assert c.stdin is not None 463 try:
530 531 import subprocess 532 import tempfile
542 try: 543 subprocess.call([str(cmd_path), filename]) 544 except OSError:
594 def edit_files(self, filenames: cabc.Iterable[str]) -> None: 595 import subprocess 596
676 def open_url(url: str, wait: bool = False, locate: bool = False) -> int: 677 import subprocess 678
695 try: 696 return subprocess.Popen(args, stderr=null).wait() 697 finally:
709 try: 710 return subprocess.call(args) 711 except OSError:
723 try: 724 return subprocess.call(args) 725 except OSError:
733 url = _unquote_file(url) 734 c = subprocess.Popen(["xdg-open", url]) 735 if wait:
733 url = _unquote_file(url) 734 c = subprocess.Popen(["xdg-open", url]) 735 if wait:
33 34 assert sys.platform == "win32" 35 import msvcrt # noqa: E402
207 self.flush() 208 except Exception: 209 pass 210 return self.buffer.write(x)
1661 if args and callable(args[0]): 1662 assert len(args) == 1 and not kwargs, ( 1663 "Use 'command(**kwargs)(callable)' to provide arguments." 1664 ) 1665 (func,) = args
1710 if args and callable(args[0]): 1711 assert len(args) == 1 and not kwargs, ( 1712 "Use 'group(**kwargs)(callable)' to provide arguments." 1713 ) 1714 (func,) = args
1867 cmd_name, cmd, args = self.resolve_command(ctx, args) 1868 assert cmd is not None 1869 ctx.invoked_subcommand = cmd_name
1889 cmd_name, cmd, args = self.resolve_command(ctx, args) 1890 assert cmd is not None 1891 sub_ctx = cmd.make_context(
2596 # not to be exposed. We still assert it here to please the type checker.
2597 assert self.name is not None, (
2598 f"{self!r} parameter's name should not be None when exposing value."
2599 )
2600 ctx.params[self.name] = value
3119 """ 3120 assert self.prompt is not None 3121
210 name = None 211 assert cls is None, "Use 'command(cls=cls)(callable)' to specify a class." 212 assert not attrs, "Use 'command(**kwargs)(callable)' to provide arguments."
211 assert cls is None, "Use 'command(cls=cls)(callable)' to specify a class." 212 assert not attrs, "Use 'command(**kwargs)(callable)' to provide arguments." 213
235 if t.TYPE_CHECKING: 236 assert cls is not None 237 assert not callable(name)
236 assert cls is not None 237 assert not callable(name) 238
192 if self.nargs > 1: 193 assert isinstance(value, cabc.Sequence) 194 holes = sum(1 for x in value if x is UNSET)
312 import shutil 313 import subprocess 314
319 else:
320 output = subprocess.run(
321 [bash_exe, "--norc", "-c", 'echo "${BASH_VERSION}"'],
322 stdout=subprocess.PIPE,
323 )
324 match = re.search(r"^(\d+)\.(\d+)\.\d+", output.stdout.decode())
513 514 assert param.name is not None 515 # Will be None if expose_value is False.
408 del os.environ[key] 409 except Exception: 410 pass 411 else:
418 del os.environ[key] 419 except Exception: 420 pass 421 else:
41 return func(*args, **kwargs) 42 except Exception: 43 pass 44 return None
79 dest_file = os.path.join(self.directory, flat_rootname(fr.relative_filename()))
80 assert dest_file.endswith("_py")
81 dest_file = dest_file[:-3] + ".py"
34 ops = {op for name in op_names if (op := dis.opmap.get(name))}
35 assert ops, f"At least one opcode must exist: {op_names}"
36 return ops
75 76 assert inst is not None 77 self.max_offset = inst.offset
710 """Display an error message, or the named topic.""" 711 assert error or topic or parser 712
742 else: 743 assert topic is not None 744 help_msg = textwrap.dedent(HELP_TOPICS.get(topic, "")).strip()
790 return ERR 791 assert options is not None 792
110 self.warn = warn
111 assert isinstance(concurrency, list), f"Expected a list: {concurrency!r}"
112
337 """Stop collecting trace information.""" 338 assert self._collectors 339 if self._collectors[-1] is not self:
342 print(f" {c!r}\n{c.origin}")
343 assert self._collectors[-1] is self, (
344 f"Expected current collector to be {self!r}, but it's {self._collectors[-1]!r}"
345 )
346
404 file_tracer = disposition.file_tracer 405 assert file_tracer is not None 406 plugin = file_tracer._coverage_plugin
431 else: # pragma: cant happen 432 assert isinstance(runtime_err, Exception) 433 raise runtime_err
508 self.paths = {}
509 assert isinstance(value, Mapping)
510 for k, v in value.items():
510 for k, v in value.items(): 511 assert isinstance(v, Iterable) 512 self.paths[k] = list(v)
657 config_file = ".coveragerc" 658 assert isinstance(config_file, str) 659 files_to_try = [
444 """ 445 assert self._inorout is not None 446 disp = self._inorout.should_trace(filename, frame)
456 """ 457 assert self._inorout is not None 458 reason = self._inorout.check_include_omit_etc(filename, frame)
491 if once: 492 assert slug is not None 493 self._no_warn_slugs.add(slug)
559 if not should_skip: 560 assert self._data is not None 561 self._data.read()
618 619 assert self._data is not None 620 self._collector.use_data(self._data, self.config.context)
696 697 assert self._collector is not None 698 assert self._inorout is not None
697 assert self._collector is not None 698 assert self._inorout is not None 699
722 if self._started: 723 assert self._collector is not None 724 self._collector.stop()
770 self._init_data(suffix=None) 771 assert self._data is not None 772 self._data.erase(parallel=self.config.parallel)
791 792 assert self._collector is not None 793 if self._collector.should_start_context:
897 898 assert self._data is not None 899 combine_parallel_data(
929 930 assert self._data is not None 931 return self._data
939 """ 940 assert self._data is not None 941 assert self._inorout is not None
940 assert self._data is not None 941 assert self._inorout is not None 942
1023 """Get a FileReporter for a module or file name.""" 1024 assert self._data is not None 1025 plugin = None
1046 1047 assert isinstance(file_reporter, FileReporter) 1048 return file_reporter
1063 """ 1064 assert self._data is not None 1065 if not morfs:
46 lines = data.lines(filename) 47 assert lines is not None 48 summ[filename_fn(filename)] = len(lines)
152 LABEL_LEN = 30 153 assert all(len(l) < LABEL_LEN for l, _ in info) 154 for label, data in info:
533 """Just like file.write, but filter through all our filters.""" 534 assert self.outfile is not None 535 if not self.outfile.closed:
540 """Flush our file.""" 541 assert self.outfile is not None 542 if not self.outfile.closed:
142 self.loader = DummyLoader(self.modulename) 143 assert pathname is not None 144 self.pathname = os.path.abspath(pathname)
212 try: 213 exec(code, main_mod.__dict__) 214 except SystemExit: # pylint: disable=try-except-raise
224 typ, err, tb = sys.exc_info() 225 assert typ is not None 226 assert err is not None
225 assert typ is not None 226 assert err is not None 227 assert tb is not None
226 assert err is not None 227 assert tb is not None 228
236 try: 237 assert err.__traceback__ is not None 238 err.__traceback__ = err.__traceback__.tb_next
246 typ2, err2, tb2 = sys.exc_info() 247 assert typ2 is not None 248 assert err2 is not None
247 assert typ2 is not None 248 assert err2 is not None 249 assert tb2 is not None
248 assert err2 is not None 249 assert tb2 is not None 250 err2.__suppress_context__ = True
250 err2.__suppress_context__ = True 251 assert err2.__traceback__ is not None 252 err2.__traceback__ = err2.__traceback__.tb_next
325 # The rest of the file is the code object we want. 326 code = marshal.load(fpyc) 327 assert isinstance(code, CodeType)
326 code = marshal.load(fpyc) 327 assert isinstance(code, CodeType) 328
434 if self.extra_css: 435 assert self.config.extra_css is not None 436 self.copy_static_file(self.config.extra_css, slug="extra_css")
526 # two missing, we would have written one as "always raised."
527 assert len(longs) == 1, (
528 f"Had long annotations in {ftr.fr.relative_filename()}: {longs}"
529 )
530 ldata.annotate_long = longs[0]
138 spec = importlib.util.find_spec(modulename) 139 except Exception: 140 pass 141 else:
125 # it can mean the line always raised an exception. 126 assert len(executed_arcs[line]) == 0 127 destinations = [(dst, "-") for dst in missing_arcs[line]]
295 spec = importlib.util.spec_from_file_location(modname, modfile) 296 assert spec is not None 297 mod = importlib.util.module_from_spec(spec)
298 sys.modules[modname] = mod 299 assert spec.loader is not None 300 spec.loader.exec_module(mod)
39 _debug = cov._debug
40 assert _debug is not None
41 if _debug.should("multiproc"):
50 """ 51 assert text or filename, "PythonParser needs either text or filename" 52 self.filename = filename or "<code>"
148 149 assert self.text is not None 150 tokgen = generate_tokens(self.text)
209 # functions and classes. 210 assert self._ast_root is not None 211 for node in ast.walk(self._ast_root):
289 self._analyze_ast() 290 assert self._all_arcs is not None 291 return self._all_arcs
298 """ 299 assert self._ast_root is not None 300 aaa = AstArcAnalyzer(self.filename, self._ast_root, self.raw_statements, self.multiline_map)
364 for l1, l2 in self.arcs():
365 assert l1 > 0, f"{l1=} should be greater than zero in {self.filename}"
366 if l1 in self.excluded:
389 self._analyze_ast() 390 assert self._missing_arc_fragments is not None 391
408 self._analyze_ast() 409 assert self._missing_arc_fragments is not None 410
428 else: 429 assert filename is not None 430 # We only get here if earlier ast parsing succeeded, so no need to
645 if node.id in ["True", "False", "None", "__debug__"]: 646 return True, eval(node.id) # pylint: disable=eval-used 647 case ast.UnaryOp():
745 continue
746 assert len(nexts) == 1, f"Expected one arc, got {nexts} with {start = }"
747 nxt = nexts.pop()
938 else: 939 assert from_start is None 940
1012 last = dec_start 1013 assert last is not None 1014 self.add_arc(last, main_line)
1019 # it to the first one.
1020 assert node.body, f"Oops: {node.body = } in {self.filename}@{node.lineno}"
1021 # The body is handled in collect_arcs.
1021 # The body is handled in collect_arcs.
1022 assert last is not None
1023 return {ArcStart(last)}
1041 my_block = self.block_stack.pop() 1042 assert isinstance(my_block, LoopBlock) 1043 exits = my_block.break_exits
1126 # or finally, or both. 1127 assert handler_start is not None or final_start is not None 1128 try_block = TryBlock(handler_start, final_start)
1180 my_block = self.block_stack.pop() 1181 assert isinstance(my_block, LoopBlock) 1182 exits.update(my_block.break_exits)
131 atexit.register(delete_pth_files) 132 assert config.config_file is not None 133 os.environ["COVERAGE_PROCESS_CONFIG"] = config.serialize()
178 reporter = self.plugin.file_reporter(filename)
179 assert isinstance(reporter, FileReporter)
180 self.debug.write(f"file_reporter({filename!r}) --> {reporter!r}")
185 """Lazily create a :class:`PythonParser`.""" 186 assert self.coverage is not None 187 if self._parser is None:
213 def no_branch_lines(self) -> set[TLineNo]: 214 assert self.coverage is not None 215 no_branch = self.parser.lines_matching(
127 """For hard-core logging of what this tracer is doing."""
128 with open("/tmp/debug_trace.txt", "a", encoding="utf-8") as f:
129 f.write(f"{marker} {self.id}[{len(self.data_stack)}]")
193 started_context = True 194 assert self.switch_context is not None 195 self.switch_context(self.context) # pylint: disable=not-callable
229 tracename = disp.source_filename 230 assert tracename is not None 231 self.lock_data()
305 if self.started_context: 306 assert self.switch_context is not None 307 self.context = None
337 if self.threading: 338 assert self.thread is not None 339 if self.thread.ident != self.threading.current_thread().ident:
40 """Write a line to the output, adding a newline.""" 41 assert self.outfile is not None 42 self.outfile.write(line.rstrip())
159 for l1, l2 in missing:
160 assert l1 != l2, f"In {self.filename}, didn't expect {l1} == {l2}"
161 if l1 in branch_lines:
175 for l1, l2 in self.arcs_executed:
176 assert l1 != l2, f"Oops: Didn't think this could happen: {l1 = }, {l2 = }"
177 if (l1, l2) not in self.arc_possibilities_set:
398 # Implementing 0+Numbers allows us to sum() a list of Numbers. 399 assert other == 0 # we only ever call it this way. 400 return self
318 row = db.execute_one("select version from coverage_schema")
319 assert row is not None
320 except Exception as exc:
451 """Get the id for a context.""" 452 assert context is not None 453 self._start_using()
589 """Force the data file to choose between lines and arcs.""" 590 assert lines or arcs 591 assert not (lines and arcs)
590 assert lines or arcs 591 assert not (lines and arcs) 592 if lines and self._has_arcs:
727 with self._connect() as con: 728 assert con.con is not None 729 con.con.isolation_level = "IMMEDIATE"
987 context_clause = " or ".join(["context REGEXP ?"] * len(contexts))
988 with con.execute("SELECT id FROM context WHERE " + context_clause, contexts) as cur:
989 self._query_context_ids = [row[0] for row in cur.fetchall()]
1146 # if the process forks. 1147 die = random.Random(os.urandom(8)) 1148 letters = string.ascii_uppercase + string.ascii_lowercase
96 self._connect() 97 assert self.con is not None 98 self.con.__enter__()
105 try: 106 assert self.con is not None 107 self.con.__exit__(exc_type, exc_value, traceback)
119 try: 120 assert self.con is not None 121 try:
140 )
141 except Exception:
142 pass
143 if self.debug.should("sql"):
178 with self.execute(sql, parameters) as cur: 179 assert cur.lastrowid is not None 180 rowid: int = cur.lastrowid
210 self.debug.write(f"{i:4d}: {row!r}")
211 assert self.con is not None
212 try:
232 ) 233 assert self.con is not None 234 self.con.executescript(script).close()
237 """Return a multi-line string, the SQL dump of the database.""" 238 assert self.con is not None 239 return "\n".join(self.con.iterdump())
77 sys_monitoring = LoggingWrapper(sys_monitoring, "sys.monitoring") 78 assert sys_monitoring is not None 79
100 for filename in [
101 "/tmp/foo.out",
102 # f"{root}.out",
103 # f"{root}-{pid}.out",
104 # f"{root}-{pid}-{tslug}.out",
105 ]:
106 with open(filename, "a", encoding="utf-8") as f:
107 try:
143 try: 144 assert sys_monitoring is not None 145 sys_monitoring.set_events(sys.monitoring.COVERAGE_ID, 0)
251 with self.lock: 252 assert sys_monitoring is not None 253 sys_monitoring.use_tool_id(self.myid, "coverage.py")
283 return 284 assert sys_monitoring is not None 285 sys_monitoring.set_events(self.myid, 0)
352 tracename = disp.source_filename 353 assert tracename is not None 354 self.lock_data()
381 if self.sysmon_on: 382 assert sys_monitoring is not None 383 local_events = events.PY_RETURN | events.PY_RESUME | events.LINE
384 if self.trace_arcs: 385 assert env.PYBEHAVIOR.branch_right_left 386 local_events |= (
485 # This could be an exception jumping from line to line. 486 assert code_info.byte_to_line is not None 487 l1 = code_info.byte_to_line.get(instruction_offset)
67 # A check that the caller really finished all the blocks they started. 68 assert self.indent_level == 0 69 # Get the Python source as a single string.
72 global_namespace: dict[str, Any] = {}
73 exec(python_source, global_namespace)
74 return global_namespace
51 # ever call this with a single filename. 52 assert isinstance(filenames, (bytes, str, os.PathLike)) 53 filename = os.fspath(filenames)
105 raise ConfigError(f"No section: {section!r}")
106 assert name is not None
107 try:
23 """Create a readable version string from version_info tuple components."""
24 assert releaselevel in ["alpha", "beta", "candidate", "final"]
25 version = f"{major}.{minor}.{micro}"
11 import time 12 import xml.dom.minidom 13 from collections.abc import Iterable
92 impl = xml.dom.minidom.getDOMImplementation() 93 assert impl is not None 94 self.xml_out = impl.createDocument(None, "coverage", None)
97 xcoverage = self.xml_out.documentElement
98 assert xcoverage is not None
99 xcoverage.setAttribute("version", __version__)
128 ] = {
129 SignatureAlgorithmOID.RSA_WITH_MD5: hashes.MD5(),
130 SignatureAlgorithmOID.RSA_WITH_SHA1: hashes.SHA1(),
129 SignatureAlgorithmOID.RSA_WITH_MD5: hashes.MD5(), 130 SignatureAlgorithmOID.RSA_WITH_SHA1: hashes.SHA1(), 131 SignatureAlgorithmOID._RSA_WITH_SHA1: hashes.SHA1(),
130 SignatureAlgorithmOID.RSA_WITH_SHA1: hashes.SHA1(), 131 SignatureAlgorithmOID._RSA_WITH_SHA1: hashes.SHA1(), 132 SignatureAlgorithmOID.RSA_WITH_SHA224: hashes.SHA224(),
139 SignatureAlgorithmOID.RSA_WITH_SHA3_512: hashes.SHA3_512(), 140 SignatureAlgorithmOID.ECDSA_WITH_SHA1: hashes.SHA1(), 141 SignatureAlgorithmOID.ECDSA_WITH_SHA224: hashes.SHA224(),
148 SignatureAlgorithmOID.ECDSA_WITH_SHA3_512: hashes.SHA3_512(), 149 SignatureAlgorithmOID.DSA_WITH_SHA1: hashes.SHA1(), 150 SignatureAlgorithmOID.DSA_WITH_SHA224: hashes.SHA224(),
114 aead_cipher = _evp_aead_get_cipher(backend, cipher) 115 assert aead_cipher is not None 116 key_ptr = backend._ffi.from_buffer(key)
135 # Currently only ChaCha20-Poly1305 is supported using this API 136 assert isinstance(cipher, ChaCha20Poly1305) 137 return backend._lib.EVP_aead_chacha20_poly1305()
148 ) -> bytes: 149 assert ctx is not None 150
151 aead_cipher = _evp_aead_get_cipher(backend, cipher) 152 assert aead_cipher is not None 153
192 193 assert ctx is not None 194
246 else:
247 assert isinstance(cipher, AESGCM)
248 return f"aes-{len(cipher._key) * 8}-gcm".encode("ascii")
319 if operation == _DECRYPT: 320 assert tag is not None 321 _evp_cipher_set_tag(backend, ctx, tag)
178 self._binding._enable_fips() 179 assert rust_openssl.is_fips_enabled() 180 self._fips_enabled = rust_openssl.is_fips_enabled()
348 def _bn_to_int(self, bn) -> int: 349 assert bn != self._ffi.NULL 350 self.openssl_assert(not self._lib.BN_is_negative(bn))
922 else: 923 assert userdata.error == -2 924 raise ValueError(
937 938 assert ( 939 password is not None and userdata.called == 1 940 ) or password is None 941
1331 else: 1332 assert key_type == self._lib.EVP_PKEY_EC 1333 write_bio = self._lib.PEM_write_bio_ECPrivateKey
1346 else: 1347 assert key_type == self._lib.EVP_PKEY_EC 1348 write_bio = self._lib.i2d_ECPrivateKey_bio
1755 else: 1756 assert keycertalg is None 1757 # We use OpenSSL's defaults
271 else: 272 assert format is serialization.PublicFormat.UncompressedPoint 273 conversion = self._backend._lib.POINT_CONVERSION_UNCOMPRESSED
251 if isinstance(padding, PSS): 252 assert isinstance(algorithm, hashes.HashAlgorithm) 253 res = backend._lib.EVP_PKEY_CTX_set_rsa_pss_saltlen(
100 salt_length = emlen - hash_algorithm.digest_size - 2 101 assert salt_length >= 0 102 return salt_length
292 q, r = divmod(n, p) 293 assert r == 0 294 p, q = sorted((p, q), reverse=True)
90 # type. Maybe it won't some time in the future. 91 assert isinstance(mode, modes.Mode) 92 mode.validate_for_algorithm(algorithm)
216 # mypy needs this assert even though _check_limit already checked 217 assert self._ctx is not None 218 return self._ctx.update(data)
222 # mypy needs this assert even though _check_limit already checked 223 assert self._ctx is not None 224 return self._ctx.update_into(data, buf)
267 ) 268 assert self._tag is not None 269 return self._tag
49 ): 50 assert callable(prf) 51
275 def _prf(self, _: bytes) -> cmac.CMAC: 276 assert self._cipher is not None 277
282 283 assert self._cipher is not None 284
20 # RFC 3394 Key Wrap - 2.2.1 (index method) 21 encryptor = Cipher(AES(wrapping_key), ECB()).encryptor() 22 n = len(r)
33 34 assert encryptor.finalize() == b"" 35
63 # Implement RFC 3394 Key Unwrap - 2.2.2 (index method) 64 decryptor = Cipher(AES(wrapping_key), ECB()).decryptor() 65 n = len(r)
76 77 assert decryptor.finalize() == b"" 78 return a, r
96 # RFC 5649 - 4.1 - exactly 8 octets after padding 97 encryptor = Cipher(AES(wrapping_key), ECB()).encryptor() 98 b = encryptor.update(aiv + key_to_wrap)
98 b = encryptor.update(aiv + key_to_wrap) 99 assert encryptor.finalize() == b"" 100 return b
118 # RFC 5649 - 4.2 - exactly two 64-bit blocks 119 decryptor = Cipher(AES(wrapping_key), ECB()).decryptor() 120 out = decryptor.update(wrapped_key)
120 out = decryptor.update(wrapped_key) 121 assert decryptor.finalize() == b"" 122 a = out[:8]
678 if _SSH_CIPHERS[ciphername_bytes].is_aead: 679 assert isinstance(dec, AEADDecryptionContext) 680 _check_empty(dec.finalize_with_tag(tag))
927 else: 928 assert isinstance(signature_key, rsa.RSAPublicKey) 929 if self._inner_sig_type == _SSH_RSA:
929 if self._inner_sig_type == _SSH_RSA: 930 hash_alg = hashes.SHA1() 931 elif self._inner_sig_type == _SSH_RSA_SHA256:
933 else: 934 assert self._inner_sig_type == _SSH_RSA_SHA512 935 hash_alg = hashes.SHA512()
949 else: 950 assert isinstance(curve, ec.SECP521R1) 951 return hashes.SHA512()
1513 else: 1514 assert isinstance(private_key, rsa.RSAPrivateKey) 1515 # Just like Golang, we're going to use SHA512 for RSA
70 71 return hashlib.sha1(data).digest() 72
138 ):
139 assert isinstance(value, str)
140 c_len = len(value.encode("utf8"))
319 **__: Any) -> List[Any]: 320 assert view is not None 321 return [
333 **__: Any) -> XmlElement: 334 assert view is not None 335 elem = XmlElement(element_name)
662 **__: Any) -> str: 663 assert view is not None 664 return cls.__normalize(o, view)
669 **__: Any) -> str: 670 assert view is not None 671 return cls.__normalize(o, view)
335 **__: Any) -> Optional[str]: 336 assert view is not None 337 return cls.__normalize(o, view)
342 **__: Any) -> Optional[str]: 343 assert view is not None 344 return cls.__normalize(o, view)
414 **__: Any) -> Optional[str]: 415 assert view is not None 416 return cls.__normalize(o, view)
421 **__: Any) -> Optional[str]: 422 assert view is not None 423 return cls.__normalize(o, view)
679 **__: Any) -> str: 680 assert view is not None 681 return cls.__normalize(o, view)
686 **__: Any) -> str: 687 assert view is not None 688 return cls.__normalize(o, view)
39 __IDS: Set[str] = set(json_load(schema).get('enum', []))
40 assert len(__IDS) > 0, 'known SPDX-IDs should be non-empty set'
41
11 import warnings 12 from xml.etree.ElementTree import ParseError 13 from xml.etree.ElementTree import TreeBuilder as _TreeBuilder
12 from xml.etree.ElementTree import ParseError 13 from xml.etree.ElementTree import TreeBuilder as _TreeBuilder 14 from xml.etree.ElementTree import parse as _parse
13 from xml.etree.ElementTree import TreeBuilder as _TreeBuilder 14 from xml.etree.ElementTree import parse as _parse 15 from xml.etree.ElementTree import tostring
14 from xml.etree.ElementTree import parse as _parse 15 from xml.etree.ElementTree import tostring 16
21 else: 22 from xml.etree.ElementTree import XMLParser as _XMLParser 23 from xml.etree.ElementTree import iterparse as _iterparse
22 from xml.etree.ElementTree import XMLParser as _XMLParser 23 from xml.etree.ElementTree import iterparse as _iterparse 24
13 14 from xml.etree.cElementTree import TreeBuilder as _TreeBuilder 15 from xml.etree.cElementTree import parse as _parse
14 from xml.etree.cElementTree import TreeBuilder as _TreeBuilder 15 from xml.etree.cElementTree import parse as _parse 16 from xml.etree.cElementTree import tostring
15 from xml.etree.cElementTree import parse as _parse 16 from xml.etree.cElementTree import tostring 17
18 # iterparse from ElementTree! 19 from xml.etree.ElementTree import iterparse as _iterparse 20
75 def _apply_defusing(defused_mod): 76 assert defused_mod is sys.modules[defused_mod.__name__] 77 stdlib_name = defused_mod.__origin__
9 10 from xml.dom.expatbuilder import ExpatBuilder as _ExpatBuilder 11 from xml.dom.expatbuilder import Namespaces as _Namespaces
10 from xml.dom.expatbuilder import ExpatBuilder as _ExpatBuilder 11 from xml.dom.expatbuilder import Namespaces as _Namespaces 12
9 10 from xml.sax.expatreader import ExpatParser as _ExpatParser 11
14 15 from lxml import etree as _etree 16
134 parser = getDefaultParser() 135 elementtree = _etree.parse(source, parser, base_url=base_url) 136 check_docinfo(elementtree, forbid_dtd, forbid_entities)
142 parser = getDefaultParser() 143 rootelement = _etree.fromstring(text, parser, base_url=base_url) 144 elementtree = rootelement.getroottree()
9 10 from xml.dom.minidom import _do_pulldom_parse 11 from . import expatbuilder as _expatbuilder
9 10 from xml.dom.pulldom import parse as _parse 11 from xml.dom.pulldom import parseString as _parseString
10 from xml.dom.pulldom import parse as _parse 11 from xml.dom.pulldom import parseString as _parseString 12 from .sax import make_parser
29 parser.forbid_external = forbid_external 30 return _parse(stream_or_string, parser, bufsize) 31
40 parser.forbid_external = forbid_external 41 return _parseString(string, parser)
9 10 from xml.sax import InputSource as _InputSource 11 from xml.sax import ErrorHandler as _ErrorHandler
10 from xml.sax import InputSource as _InputSource 11 from xml.sax import ErrorHandler as _ErrorHandler 12
24 __origin__ = "xmlrpclib" 25 from xmlrpclib import ExpatParser 26 import xmlrpclib as xmlrpc_client
25 from xmlrpclib import ExpatParser 26 import xmlrpclib as xmlrpc_client 27
28 xmlrpc_server = None 29 from xmlrpclib import gzip_decode as _orig_gzip_decode 30 from xmlrpclib import GzipDecodedResponse as _OrigGzipDecodedResponse
29 from xmlrpclib import gzip_decode as _orig_gzip_decode 30 from xmlrpclib import GzipDecodedResponse as _OrigGzipDecodedResponse 31
54 import builtins as __builtin__ 55 from pickle import _Pickler as StockPickler, Unpickler as StockUnpickler 56 from pickle import GLOBAL, POP
55 from pickle import _Pickler as StockPickler, Unpickler as StockUnpickler 56 from pickle import GLOBAL, POP 57 from _thread import LockType
78 from types import MappingProxyType as DictProxyType, new_class 79 from pickle import DEFAULT_PROTOCOL, HIGHEST_PROTOCOL, PickleError, PicklingError, UnpicklingError 80 import __main__ as _main_module
594 def _unmarshal(string): 595 return marshal.loads(string) 596
886 r.name = name 887 assert f.name == name 888 else:
978 def _eval_repr(repr_str): 979 return eval(repr_str) 980
1019 break 1020 except Exception: 1021 pass 1022
1036 attr = repr_str.split("'")[3]
1037 return eval(attr+'.__dict__["'+name+'"]')
1038 except Exception:
2215 isinstance(python, str) 2216 import subprocess 2217 fail = True
2230 msg = "%s -c import dill; print(dill.loads(%s))" % (python, repr(_obj)) 2231 msg = "SUCCESS" if not subprocess.call(msg.split(None,2)) else "LOAD FAILED" 2232 if verbose:
36 import tempfile 37 import shelve 38 import zlib
462 # cryptographic services (CH 14) 463 x['HashType'] = hashlib.md5() 464 if (sys.hexversion < 0x30800a1):
516 517 from dill._dill import _testcapsule 518 if _testcapsule is not None:
55 if getsourcelines(obj) == (lines,lnum): return obj 56 except Exception: #TypeError, IOError 57 pass 58 return #XXX: or raise? no matches
242 """get objects that fail to pickle""" 243 from dill import pickles 244 if not depth:
251 """get types for objects that fail to pickle""" 252 from dill import pickles 253 if not depth:
260 """get errors for objects that fail to pickle""" 261 from dill import pickles, copy 262 if not depth:
265 if exact: 266 assert pik == obj, \ 267 "Unpickling produces %s instead of %s" % (pik,obj) 268 assert type(pik) == type(obj), \
267 "Unpickling produces %s instead of %s" % (pik,obj) 268 assert type(pik) == type(obj), \ 269 "Unpickling produces %s instead of %s" % (type(pik),type(obj)) 270 return None
54 55 import dill 56
15 # non-local import of dill.objects 16 from dill import objects 17 for _type in objects.keys():
17 for _type in objects.keys():
18 exec("%s = type(objects['%s'])" % (_type,_type))
19
29 30 from dill import _dill, Pickler, Unpickler 31 from ._dill import (
300 return io.BufferedReader(stream) 301 except Exception: 302 pass 303 return _PeekableReader(stream)
306 """identify the name of the module stored in the given file-type object"""
307 from pickletools import genops
308 UNICODE = {'UNICODE', 'BINUNICODE', 'SHORT_BINUNICODE'}
447 #FIXME: dill.settings are disabled 448 unpickler = Unpickler(file, **kwds) 449 unpickler._session = True
500 pass 501 assert loaded is main 502 _restore_modules(unpickler, main)
11 12 from pickle import DEFAULT_PROTOCOL 13
59 try: #FIXME: unsafe
60 _ = eval("lambda %s : %s" % (lhs,rhs), globals(),locals())
61 except Exception: _ = dummy
81 try: #FIXME: unsafe
82 _f = eval("lambda %s : %s" % (_lhs,_rhs), globals(),locals())
83 except Exception: _f = dummy
394 lines, lnum = ["%s = __import__('%s', fromlist=['%s']).%s\n" % (name,module,name,name)], 0
395 obj = eval(lines[0].lstrip(name + ' = '))
396 lines, lnum = getsourcelines(obj, enclosing=enclosing)
540 # _ = eval(getsource(f, force=True)) #XXX: safer but less robust 541 exec(getimportable(f, alias='_'), __globals__, __locals__) 542 except Exception:
571 """ 572 from dill import dumps 573 pik = repr(dumps(object))
654 return qual + [name] #XXX: can be wrong for some aliased objects 655 except Exception: pass 656 # special case: numpy.inf and numpy.nan (we don't want them as floats)
710 #print(_str) 711 try: exec(_str) #XXX: check if == obj? (name collision) 712 except ImportError: #XXX: better top-down or bottom-up recursion?
802 force=force, lstrip=lstrip, builtin=builtin) 803 except Exception: pass 804 try:
817 818 except Exception: pass 819 if not source: # try getsource, only if it hasn't been tried yet
822 force=force, lstrip=lstrip, builtin=builtin) 823 except Exception: pass 824 # get the name (of functions, lambdas, and classes)
70 local = {}
71 exec(source, local)
72 _ = eval("%s" % alias, local)
71 exec(source, local)
72 _ = eval("%s" % alias, local)
73 return _
129 """
130 import dill as pickle
131 mode = kwds.pop('mode', 'rb')
132 name = getattr(file, 'name', file) # name=file.name or name=file (if str) 133 return pickle.load(open(name, mode=mode, **kwds)) 134
158 """ 159 import dill as pickle 160 import tempfile
175 """ 176 import dill as pickle 177 from io import BytesIO as StringIO
179 if value != buffer: value = value() # buffer.getvalue() 180 return pickle.load(StringIO(value)) 181
189 """ 190 import dill as pickle 191 from io import BytesIO as StringIO
219 local = {}
220 exec(source, local)
221 _ = eval("%s" % alias, local)
220 exec(source, local)
221 _ = eval("%s" % alias, local)
222 return _
10 import sys 11 import subprocess as sp 12 python = sys.executable
27 for test in tests: 28 p = sp.Popen([python, test], shell=shell).wait() 29 if p:
9 """ 10 import dill 11 import abc
72 def test_abc_non_local(): 73 assert dill.copy(OneTwoThree) is not OneTwoThree 74 assert dill.copy(EasyAsAbc) is not EasyAsAbc
73 assert dill.copy(OneTwoThree) is not OneTwoThree 74 assert dill.copy(EasyAsAbc) is not EasyAsAbc 75
77 warnings.simplefilter("ignore", dill.PicklingWarning)
78 assert dill.copy(OneTwoThree, byref=True) is OneTwoThree
79 assert dill.copy(EasyAsAbc, byref=True) is EasyAsAbc
78 assert dill.copy(OneTwoThree, byref=True) is OneTwoThree 79 assert dill.copy(EasyAsAbc, byref=True) is EasyAsAbc 80
84 depickled = dill.copy(instance) 85 assert type(depickled) is type(instance) #NOTE: issue #612, test_abc_local 86 #NOTE: dill.copy of local (or non-local) classes should (not) be the same?
86 #NOTE: dill.copy of local (or non-local) classes should (not) be the same? 87 assert type(depickled.bar) is FunctionType 88 assert depickled.bar(3) == 9
87 assert type(depickled.bar) is FunctionType 88 assert depickled.bar(3) == 9 89 assert depickled.sfoo() == "Static Method SFOO"
88 assert depickled.bar(3) == 9 89 assert depickled.sfoo() == "Static Method SFOO" 90 assert depickled.cfoo() == "Class Method CFOO"
89 assert depickled.sfoo() == "Static Method SFOO" 90 assert depickled.cfoo() == "Class Method CFOO" 91 assert depickled.foo() == "Instance Method FOO"
90 assert depickled.cfoo() == "Class Method CFOO" 91 assert depickled.foo() == "Instance Method FOO" 92
105 labc = dill.copy(LocalABC) 106 assert labc is not LocalABC 107 assert type(labc) is type(LocalABC)
106 assert labc is not LocalABC 107 assert type(labc) is type(LocalABC) 108 #NOTE: dill.copy of local (or non-local) classes should (not) be the same?
119 real = Real() 120 assert real.foo() == "True!" 121
128 print('Failed to raise type error')
129 assert False
130
131 labc2, pik = dill.copy((labc, Real())) 132 assert 'Real' == type(pik).__name__ 133 assert '.Real' in type(pik).__qualname__
132 assert 'Real' == type(pik).__name__ 133 assert '.Real' in type(pik).__qualname__ 134 assert type(pik) is not Real
133 assert '.Real' in type(pik).__qualname__ 134 assert type(pik) is not Real 135 assert labc2 is not LocalABC
134 assert type(pik) is not Real 135 assert labc2 is not LocalABC 136 assert labc2 is not labc
135 assert labc2 is not LocalABC 136 assert labc2 is not labc 137 assert isinstance(pik, labc2)
136 assert labc2 is not labc 137 assert isinstance(pik, labc2) 138 assert not isinstance(pik, labc)
137 assert isinstance(pik, labc2) 138 assert not isinstance(pik, labc) 139 assert not isinstance(pik, LocalABC)
138 assert not isinstance(pik, labc) 139 assert not isinstance(pik, LocalABC) 140 assert pik.baz() == "My " + repr(pik)
139 assert not isinstance(pik, LocalABC) 140 assert pik.baz() == "My " + repr(pik) 141
155 156 assert not issubclass(KlassyClass, LocalMetaABC) 157 assert issubclass(ClassyClass, LocalMetaABC)
156 assert not issubclass(KlassyClass, LocalMetaABC) 157 assert issubclass(ClassyClass, LocalMetaABC) 158
160 161 lmabc, cc, kc = dill.loads(res) 162 assert type(lmabc) == type(LocalMetaABC)
161 lmabc, cc, kc = dill.loads(res) 162 assert type(lmabc) == type(LocalMetaABC) 163 assert not issubclass(kc, lmabc)
162 assert type(lmabc) == type(LocalMetaABC) 163 assert not issubclass(kc, lmabc) 164 assert issubclass(cc, lmabc)
163 assert not issubclass(kc, lmabc) 164 assert issubclass(cc, lmabc) 165
8 9 from dill import check 10 import sys
11 12 from dill.temp import capture 13
23 else: 24 assert 'Traceback' not in out.getvalue() 25 finally:
8 9 import dill 10 from enum import EnumMeta
67 def test_class_instances(): 68 assert dill.pickles(o) 69 assert dill.pickles(oc)
68 assert dill.pickles(o) 69 assert dill.pickles(oc) 70 assert dill.pickles(n)
69 assert dill.pickles(oc) 70 assert dill.pickles(n) 71 assert dill.pickles(nc)
70 assert dill.pickles(n) 71 assert dill.pickles(nc) 72 assert dill.pickles(m)
71 assert dill.pickles(nc) 72 assert dill.pickles(m) 73
88 for obj,cls in zip(_objlist,_clslist): 89 _cls = dill.loads(cls) 90 _obj = dill.loads(obj)
89 _cls = dill.loads(cls) 90 _obj = dill.loads(obj) 91 assert _obj.ok()
90 _obj = dill.loads(obj) 91 assert _obj.ok() 92 assert _cls.ok(_cls())
91 assert _obj.ok() 92 assert _cls.ok(_cls()) 93 if _cls.__name__ == "_mclass":
93 if _cls.__name__ == "_mclass": 94 assert type(_cls).__name__ == "_meta" 95
97 def test_specialtypes(): 98 assert dill.pickles(type(None)) 99 assert dill.pickles(type(NotImplemented))
98 assert dill.pickles(type(None)) 99 assert dill.pickles(type(NotImplemented)) 100 assert dill.pickles(type(Ellipsis))
99 assert dill.pickles(type(NotImplemented)) 100 assert dill.pickles(type(Ellipsis)) 101 assert dill.pickles(type(EnumMeta))
100 assert dill.pickles(type(Ellipsis)) 101 assert dill.pickles(type(EnumMeta)) 102
116 def test_namedtuple(): 117 assert Z is dill.loads(dill.dumps(Z)) 118 assert Zi == dill.loads(dill.dumps(Zi))
116 def test_namedtuple(): 117 assert Z is dill.loads(dill.dumps(Z)) 118 assert Zi == dill.loads(dill.dumps(Zi))
117 assert Z is dill.loads(dill.dumps(Z)) 118 assert Zi == dill.loads(dill.dumps(Zi)) 119 assert X is dill.loads(dill.dumps(X))
117 assert Z is dill.loads(dill.dumps(Z)) 118 assert Zi == dill.loads(dill.dumps(Zi)) 119 assert X is dill.loads(dill.dumps(X))
118 assert Zi == dill.loads(dill.dumps(Zi)) 119 assert X is dill.loads(dill.dumps(X)) 120 assert Xi == dill.loads(dill.dumps(Xi))
118 assert Zi == dill.loads(dill.dumps(Zi)) 119 assert X is dill.loads(dill.dumps(X)) 120 assert Xi == dill.loads(dill.dumps(Xi))
119 assert X is dill.loads(dill.dumps(X)) 120 assert Xi == dill.loads(dill.dumps(Xi)) 121 assert Defaults is dill.loads(dill.dumps(Defaults))
119 assert X is dill.loads(dill.dumps(X)) 120 assert Xi == dill.loads(dill.dumps(Xi)) 121 assert Defaults is dill.loads(dill.dumps(Defaults))
120 assert Xi == dill.loads(dill.dumps(Xi)) 121 assert Defaults is dill.loads(dill.dumps(Defaults)) 122 assert Defaultsi == dill.loads(dill.dumps(Defaultsi))
120 assert Xi == dill.loads(dill.dumps(Xi)) 121 assert Defaults is dill.loads(dill.dumps(Defaults)) 122 assert Defaultsi == dill.loads(dill.dumps(Defaultsi))
121 assert Defaults is dill.loads(dill.dumps(Defaults)) 122 assert Defaultsi == dill.loads(dill.dumps(Defaultsi)) 123 assert Bad is not dill.loads(dill.dumps(Bad))
121 assert Defaults is dill.loads(dill.dumps(Defaults)) 122 assert Defaultsi == dill.loads(dill.dumps(Defaultsi)) 123 assert Bad is not dill.loads(dill.dumps(Bad))
122 assert Defaultsi == dill.loads(dill.dumps(Defaultsi)) 123 assert Bad is not dill.loads(dill.dumps(Bad)) 124 assert Bad._fields == dill.loads(dill.dumps(Bad))._fields
122 assert Defaultsi == dill.loads(dill.dumps(Defaultsi)) 123 assert Bad is not dill.loads(dill.dumps(Bad)) 124 assert Bad._fields == dill.loads(dill.dumps(Bad))._fields
123 assert Bad is not dill.loads(dill.dumps(Bad)) 124 assert Bad._fields == dill.loads(dill.dumps(Bad))._fields 125 assert tuple(Badi) == tuple(dill.loads(dill.dumps(Badi)))
123 assert Bad is not dill.loads(dill.dumps(Bad)) 124 assert Bad._fields == dill.loads(dill.dumps(Bad))._fields 125 assert tuple(Badi) == tuple(dill.loads(dill.dumps(Badi)))
124 assert Bad._fields == dill.loads(dill.dumps(Bad))._fields 125 assert tuple(Badi) == tuple(dill.loads(dill.dumps(Badi))) 126
124 assert Bad._fields == dill.loads(dill.dumps(Bad))._fields 125 assert tuple(Badi) == tuple(dill.loads(dill.dumps(Badi))) 126
132 a = A() 133 assert dill.copy(a) 134
134
135 assert dill.copy(A.B).__name__ == 'B'
136 assert dill.copy(A.B).__qualname__.endswith('.<locals>.A.B')
135 assert dill.copy(A.B).__name__ == 'B'
136 assert dill.copy(A.B).__qualname__.endswith('.<locals>.A.B')
137 assert dill.copy(A.B).__doc__ == 'docstring'
136 assert dill.copy(A.B).__qualname__.endswith('.<locals>.A.B')
137 assert dill.copy(A.B).__doc__ == 'docstring'
138 assert dill.copy(A.B).__module__ == 'testing'
137 assert dill.copy(A.B).__doc__ == 'docstring' 138 assert dill.copy(A.B).__module__ == 'testing' 139
146 147 assert type(dill.copy(A()(8))).__qualname__ == type(A()(8)).__qualname__ 148
153 dti = np.dtype('int')
154 assert np.dtype == dill.copy(np.dtype)
155 assert dti == dill.copy(dti)
154 assert np.dtype == dill.copy(np.dtype) 155 assert dti == dill.copy(dti) 156 except ImportError: pass
164 y = (x,) 165 assert y == dill.copy(y) 166
188 if not dill._dill.IS_PYPY: 189 assert dill.pickles(a1) 190 assert a1.__dict__ == dill.copy(a1).__dict__
189 assert dill.pickles(a1) 190 assert a1.__dict__ == dill.copy(a1).__dict__ 191
193 if not dill._dill.IS_PYPY: 194 assert dill.pickles(a2) 195 assert a2.__dict__ == dill.copy(a2).__dict__
194 assert dill.pickles(a2) 195 assert a2.__dict__ == dill.copy(a2).__dict__ 196
202 if not dill._dill.IS_PYPY: 203 assert dill.pickles(a3) 204 assert a3.__dict__ == dill.copy(a3).__dict__
203 assert dill.pickles(a3) 204 assert a3.__dict__ == dill.copy(a3).__dict__ 205
217 res = dill.dumps(a) 218 new_obj = dill.loads(res) 219 new_obj.__class__.test()
235 def test_slots(): 236 assert dill.pickles(Y) 237 assert dill.pickles(y)
236 assert dill.pickles(Y) 237 assert dill.pickles(y) 238 assert dill.pickles(Y.y)
237 assert dill.pickles(y) 238 assert dill.pickles(Y.y) 239 assert dill.copy(y).y == value
238 assert dill.pickles(Y.y) 239 assert dill.copy(y).y == value 240 assert dill.copy(Y2(value)).y == value
239 assert dill.copy(y).y == value 240 assert dill.copy(Y2(value)).y == value 241
242 def test_origbases(): 243 assert dill.copy(customIntList).__orig_bases__ == customIntList.__orig_bases__ 244
251 v = A(1) 252 assert dill.copy(v) == v 253
257 cls = super().__new__(mcls, name, bases, ns, **kwds) 258 assert mcls is not None 259 assert cls.method(mcls)
258 assert mcls is not None 259 assert cls.method(mcls) 260 return cls
264 l = locals()
265 exec("""class subclass_with_new(metaclass=metaclass_with_new):
266 def __new__(cls):
267 self = super().__new__(cls)
268 return self""", None, l)
269 subclass_with_new = l['subclass_with_new']
270 271 assert dill.copy(subclass_with_new()) 272
275 import enum 276 assert dill.copy(HTTPStatus.OK) is HTTPStatus.OK 277 assert dill.copy(enum.EnumMeta) is enum.EnumMeta
276 assert dill.copy(HTTPStatus.OK) is HTTPStatus.OK 277 assert dill.copy(enum.EnumMeta) is enum.EnumMeta 278
301 is_pypy = platform.python_implementation() == 'PyPy' 302 assert Bar.__dict__ == Baz.__dict__ 303 # ints
303 # ints 304 assert 'w' in Bar.__dict__ and 'w' in Baz.__dict__ 305 assert Bar.__dict__['w'] is Baz.__dict__['w']
304 assert 'w' in Bar.__dict__ and 'w' in Baz.__dict__ 305 assert Bar.__dict__['w'] is Baz.__dict__['w'] 306 assert 'x' in Bar.__dict__ and 'x' in Baz.__dict__
305 assert Bar.__dict__['w'] is Baz.__dict__['w'] 306 assert 'x' in Bar.__dict__ and 'x' in Baz.__dict__ 307 assert Bar.__dict__['x'] is Baz.__dict__['x']
306 assert 'x' in Bar.__dict__ and 'x' in Baz.__dict__ 307 assert Bar.__dict__['x'] is Baz.__dict__['x'] 308 # floats
308 # floats 309 assert 'y' in Bar.__dict__ and 'y' in Baz.__dict__ 310 same = Bar.__dict__['y'] is Baz.__dict__['y']
310 same = Bar.__dict__['y'] is Baz.__dict__['y'] 311 assert same if is_pypy else not same 312 assert 'z' in Bar.__dict__ and 'z' in Baz.__dict__
311 assert same if is_pypy else not same 312 assert 'z' in Bar.__dict__ and 'z' in Baz.__dict__ 313 same = Bar.__dict__['z'] is Baz.__dict__['z']
313 same = Bar.__dict__['z'] is Baz.__dict__['z'] 314 assert same if is_pypy else not same 315 # tuples
315 # tuples 316 assert 'a' in Bar.__dict__ and 'a' in Baz.__dict__ 317 assert Bar.__dict__['a'] is Baz.__dict__['a']
316 assert 'a' in Bar.__dict__ and 'a' in Baz.__dict__ 317 assert Bar.__dict__['a'] is Baz.__dict__['a'] 318 assert 'b' in Bar.__dict__ and 'b' in Baz.__dict__
317 assert Bar.__dict__['a'] is Baz.__dict__['a'] 318 assert 'b' in Bar.__dict__ and 'b' in Baz.__dict__ 319 assert Bar.__dict__['b'] is not Baz.__dict__['b']
318 assert 'b' in Bar.__dict__ and 'b' in Baz.__dict__ 319 assert Bar.__dict__['b'] is not Baz.__dict__['b'] 320 assert 'c' in Bar.__dict__ and 'c' in Baz.__dict__
319 assert Bar.__dict__['b'] is not Baz.__dict__['b'] 320 assert 'c' in Bar.__dict__ and 'c' in Baz.__dict__ 321 assert Bar.__dict__['c'] is not Baz.__dict__['c']
320 assert 'c' in Bar.__dict__ and 'c' in Baz.__dict__ 321 assert Bar.__dict__['c'] is not Baz.__dict__['c'] 322 # None
322 # None 323 assert 'n' in Bar.__dict__ and 'n' in Baz.__dict__ 324 assert Bar.__dict__['n'] is Baz.__dict__['n']
323 assert 'n' in Bar.__dict__ and 'n' in Baz.__dict__ 324 assert Bar.__dict__['n'] is Baz.__dict__['n'] 325
11 12 import dill 13 import dataclasses
28 save = dill.dumps(before) 29 after = dill.loads(save) 30 assert before != after # classes don't match
29 after = dill.loads(save) 30 assert before != after # classes don't match 31 assert before == B(A(**dataclasses.asdict(after.a)))
30 assert before != after # classes don't match 31 assert before == B(A(**dataclasses.asdict(after.a))) 32 assert dataclasses.asdict(before) == dataclasses.asdict(after)
31 assert before == B(A(**dataclasses.asdict(after.a))) 32 assert dataclasses.asdict(before) == dataclasses.asdict(after) 33
8 9 from dill.detect import baditems, badobjects, badtypes, errors, parent, at, globalvars 10 from dill import settings
9 from dill.detect import baditems, badobjects, badtypes, errors, parent, at, globalvars 10 from dill import settings 11 from dill._dill import IS_PYPY
10 from dill import settings 11 from dill._dill import IS_PYPY 12 from pickle import PicklingError
11 from dill._dill import IS_PYPY 12 from pickle import PicklingError 13
19 f = inspect.currentframe() 20 assert baditems(f) == [f] 21 #assert baditems(globals()) == [f] #XXX
21 #assert baditems(globals()) == [f] #XXX 22 assert badobjects(f) is f 23 assert badtypes(f) == type(f)
22 assert badobjects(f) is f 23 assert badtypes(f) == type(f) 24 assert type(errors(f)) is TypeError
23 assert badtypes(f) == type(f) 24 assert type(errors(f)) is TypeError 25 d = badtypes(f, 1)
25 d = badtypes(f, 1) 26 assert isinstance(d, dict) 27 assert list(badobjects(f, 1).keys()) == list(d.keys())
26 assert isinstance(d, dict) 27 assert list(badobjects(f, 1).keys()) == list(d.keys()) 28 assert list(errors(f, 1).keys()) == list(d.keys())
27 assert list(badobjects(f, 1).keys()) == list(d.keys()) 28 assert list(errors(f, 1).keys()) == list(d.keys()) 29 s = set([(err.__class__.__name__,err.args[0]) for err in list(errors(f, 1).values())])
32 proxy = 0 if type(f.f_locals) is dict else 1 33 assert len(s) == len(a) + proxy # TypeError (and possibly PicklingError) 34 n = 2
34 n = 2 35 assert len(a) is n if 'PicklingError' in a.keys() else n-1 36
40 obj = parent(listiter, list) 41 assert obj is x 42
42 43 if IS_PYPY: assert parent(obj, int) is None 44 else: assert parent(obj, int) is x[-1] # python oddly? finds last int
43 if IS_PYPY: assert parent(obj, int) is None 44 else: assert parent(obj, int) is x[-1] # python oddly? finds last int 45 assert at(id(at)) is at
44 else: assert parent(obj, int) is x[-1] # python oddly? finds last int 45 assert at(id(at)) is at 46
69 c 70 assert globalvars(f) == dict(a=1, b=2, c=3) 71
72 res = globalvars(foo, recurse=True) 73 assert set(res) == set(['squared', 'a']) 74 res = globalvars(foo, recurse=False)
74 res = globalvars(foo, recurse=False)
75 assert res == {}
76 zap = foo(2)
77 res = globalvars(zap, recurse=True) 78 assert set(res) == set(['squared', 'a']) 79 res = globalvars(zap, recurse=False)
79 res = globalvars(zap, recurse=False) 80 assert set(res) == set(['squared']) 81 del zap
82 res = globalvars(squared) 83 assert set(res) == set(['a']) 84 # FIXME: should find referenced __builtins__
109 def test_getstate(): 110 from dill import dumps, loads 111 dumps(f)
113 dumps(lambda: f, recurse=False) # doesn't call __getstate__ 114 assert bar[0] == b 115 dumps(lambda: f, recurse=True) # calls __getstate__
115 dumps(lambda: f, recurse=True) # calls __getstate__ 116 assert bar[0] == b + 1 117
120 global sin 121 from dill import dumps, loads 122 from math import sin, pi
131 del sin 132 sinc_ = loads(_sinc) # no NameError... pickling preserves 'sin' 133 res = sinc_(1)
134 from math import sin 135 assert sinc(1) == res 136
149 globalvars(f, recurse=True, builtin=True) 150 assert z is min 151 assert d is globals()
150 assert z is min 151 assert d is globals() 152
8 9 import dill 10 from dill._dill import OLD310, MAPPING_PROXY_TRICK, DictProxyType
9 import dill 10 from dill._dill import OLD310, MAPPING_PROXY_TRICK, DictProxyType 11
12 def test_dictproxy():
13 assert dill.copy(DictProxyType({'a': 2}))
14
16 x = {'a': 1}
17 assert dill.copy(x.keys())
18 assert dill.copy(x.values())
17 assert dill.copy(x.keys()) 18 assert dill.copy(x.values()) 19 assert dill.copy(x.items())
18 assert dill.copy(x.values()) 19 assert dill.copy(x.items()) 20
28 new_x['c'] = 1 29 assert len(new_x) == 3 and len(x) == 1 30 assert len(seperate_views[0]) == 3 and len(all_views[0]) == 1
29 assert len(new_x) == 3 and len(x) == 1 30 assert len(seperate_views[0]) == 3 and len(all_views[0]) == 1 31 assert len(seperate_views[1]) == 3 and len(all_views[1]) == 1
30 assert len(seperate_views[0]) == 3 and len(all_views[0]) == 1 31 assert len(seperate_views[1]) == 3 and len(all_views[1]) == 1 32 assert len(seperate_views[2]) == 3 and len(all_views[2]) == 1
31 assert len(seperate_views[1]) == 3 and len(all_views[1]) == 1 32 assert len(seperate_views[2]) == 3 and len(all_views[2]) == 1 33 assert dict(all_views[1]) == x
32 assert len(seperate_views[2]) == 3 and len(all_views[2]) == 1 33 assert dict(all_views[1]) == x 34 assert dict(seperate_views[1]) == new_x
33 assert dict(all_views[1]) == x 34 assert dict(seperate_views[1]) == new_x 35
8 9 from dill import __diff as diff 10
23 diff.memorise(a) 24 assert not diff.has_changed(a) 25 c.a = 1
25 c.a = 1 26 assert diff.has_changed(a) 27 diff.memorise(c, force=True)
27 diff.memorise(c, force=True) 28 assert not diff.has_changed(a) 29 c.a = 2
29 c.a = 2 30 assert diff.has_changed(a) 31 changed = diff.whats_changed(a)
31 changed = diff.whats_changed(a) 32 assert list(changed[0].keys()) == ["a"] 33 assert not changed[1]
32 assert list(changed[0].keys()) == ["a"] 33 assert not changed[1] 34
38 diff.memorise(c2) 39 assert not diff.has_changed(c2) 40 a2.append(1)
40 a2.append(1) 41 assert diff.has_changed(c2) 42 changed = diff.whats_changed(c2)
42 changed = diff.whats_changed(c2)
43 assert changed[0] == {}
44 assert changed[1]
43 assert changed[0] == {}
44 assert changed[1]
45
49 diff.memorise(c3) 50 assert not diff.has_changed(c3) 51 a3[1] = 1
51 a3[1] = 1 52 assert diff.has_changed(c3) 53 changed = diff.whats_changed(c3)
53 changed = diff.whats_changed(c3)
54 assert changed[0] == {}
55 assert changed[1]
54 assert changed[0] == {}
55 assert changed[1]
56
60 diff.memorise(abc.ABCMeta, force=True) 61 assert not diff.has_changed(abc) 62 abc.ABCMeta.zzz = 1
62 abc.ABCMeta.zzz = 1 63 assert diff.has_changed(abc) 64 changed = diff.whats_changed(abc)
64 changed = diff.whats_changed(abc) 65 assert list(changed[0].keys()) == ["ABCMeta"] 66 assert not changed[1]
65 assert list(changed[0].keys()) == ["ABCMeta"] 66 assert not changed[1] 67
93 diff.memorise(a) 94 assert not diff.has_changed(a) 95 c.a = 1
95 c.a = 1 96 assert diff.has_changed(a) 97 diff.memorise(c, force=True)
97 diff.memorise(c, force=True) 98 assert not diff.has_changed(a) 99 del c.a
99 del c.a 100 assert diff.has_changed(a) 101 changed = diff.whats_changed(a)
101 changed = diff.whats_changed(a) 102 assert list(changed[0].keys()) == ["a"] 103 assert not changed[1]
102 assert list(changed[0].keys()) == ["a"] 103 assert not changed[1] 104
8 9 import dill as pickle 10 from io import BytesIO as StringIO
18 obj = lambda : my_fn(34) 19 assert obj() == 578 20
27 obj2_io = StringIO(obj_str) 28 unpickler = pickle.Unpickler(obj2_io) 29 obj2 = unpickler.load()
30 31 assert obj2() == 578 32
36 pickler = pickle.Pickler(obj_io) 37 assert pickle._dill.is_dill(pickler) is True 38
39 pickler = pickle._dill.StockPickler(obj_io) 40 assert pickle._dill.is_dill(pickler) is False 41
44 pickler = mp.reduction.ForkingPickler(obj_io) 45 assert pickle._dill.is_dill(pickler, child=True) is True 46 assert pickle._dill.is_dill(pickler, child=False) is False
45 assert pickle._dill.is_dill(pickler, child=True) is True 46 assert pickle._dill.is_dill(pickler, child=False) is False 47 except Exception:
46 assert pickle._dill.is_dill(pickler, child=False) is False 47 except Exception: 48 pass 49
7 8 import dill 9 dill.settings['recurse'] = True
25 def test_doc_dill_issue_219(): 26 back_fn = dill.loads(dill.dumps(get_fun_with_strftime())) 27 assert back_fn() == "1943-01-04 00:00:00"
26 back_fn = dill.loads(dill.dumps(get_fun_with_strftime())) 27 assert back_fn() == "1943-01-04 00:00:00" 28 dupl = dill.loads(dill.dumps(get_fun_with_strftime2))
27 assert back_fn() == "1943-01-04 00:00:00" 28 dupl = dill.loads(dill.dumps(get_fun_with_strftime2)) 29 assert dupl() == get_fun_with_strftime2()
28 dupl = dill.loads(dill.dumps(get_fun_with_strftime2)) 29 assert dupl() == get_fun_with_strftime2() 30
40 import re 41 back_fn = dill.loads(dill.dumps(get_fun_with_internal_import())) 42 import inspect
44 vars = inspect.getclosurevars(back_fn)
45 assert vars.globals == {}
46 assert vars.nonlocals == {}
45 assert vars.globals == {}
46 assert vars.nonlocals == {}
47 assert back_fn() == re.compile("$")
46 assert vars.nonlocals == {}
47 assert back_fn() == re.compile("$")
48 assert "__builtins__" in back_fn.__globals__
47 assert back_fn() == re.compile("$")
48 assert "__builtins__" in back_fn.__globals__
49
13 14 import dill 15
28 for i in range(number): 29 f.write(random.choice(rand_chars)) 30 f.close()
66 f = open(fname, "r") 67 _f = dill.loads(dill.dumps(f, fmode=fmode))#, strictio=strictio)) 68 assert _f.mode == f.mode
67 _f = dill.loads(dill.dumps(f, fmode=fmode))#, strictio=strictio)) 68 assert _f.mode == f.mode 69 assert _f.tell() == f.tell()
68 assert _f.mode == f.mode 69 assert _f.tell() == f.tell() 70 assert _f.read() == f.read()
69 assert _f.tell() == f.tell() 70 assert _f.read() == f.read() 71 f.close()
81 f.close() 82 f2 = dill.loads(f_dumped) #FIXME: fails due to pypy/issues/1233 83 # TypeError: expected py_object instance instead of str
90 if fmode == dill.HANDLE_FMODE: 91 assert open(fname).read() == " world!" 92 assert f2mode == f1mode
91 assert open(fname).read() == " world!" 92 assert f2mode == f1mode 93 assert f2tell == 0
92 assert f2mode == f1mode 93 assert f2tell == 0 94 elif fmode == dill.CONTENTS_FMODE:
94 elif fmode == dill.CONTENTS_FMODE: 95 assert open(fname).read() == "hello world!" 96 assert f2mode == f1mode
95 assert open(fname).read() == "hello world!" 96 assert f2mode == f1mode 97 assert f2tell == ftell
96 assert f2mode == f1mode 97 assert f2tell == ftell 98 assert f2name == fname
97 assert f2tell == ftell 98 assert f2name == fname 99 elif fmode == dill.FILE_FMODE:
99 elif fmode == dill.FILE_FMODE: 100 assert open(fname).read() == "hello world!" 101 assert f2mode == f1mode
100 assert open(fname).read() == "hello world!" 101 assert f2mode == f1mode 102 assert f2tell == ftell
101 assert f2mode == f1mode 102 assert f2tell == ftell 103 else:
115 f.close() 116 f2 = dill.loads(f_dumped) 117 f2mode = f2.mode
121 122 assert f2mode == f1mode 123 if fmode == dill.CONTENTS_FMODE:
123 if fmode == dill.CONTENTS_FMODE: 124 assert open(fname).read() == "hello world!" 125 assert f2tell == ftell
124 assert open(fname).read() == "hello world!" 125 assert f2tell == ftell 126 elif fmode == dill.HANDLE_FMODE:
126 elif fmode == dill.HANDLE_FMODE: 127 assert open(fname).read() == "hello world!" 128 assert f2tell == ftell
127 assert open(fname).read() == "hello world!" 128 assert f2tell == ftell 129 elif fmode == dill.FILE_FMODE:
129 elif fmode == dill.FILE_FMODE: 130 assert open(fname).read() == "hello world!" 131 assert f2tell == ftell
130 assert open(fname).read() == "hello world!" 131 assert f2tell == ftell 132 else:
149 if strictio: # throw error if ftell > EOF 150 assert throws(dill.loads, (f_dumped,), buffer_error) 151 else:
151 else: 152 f2 = dill.loads(f_dumped) 153 assert f2.mode == f1mode
152 f2 = dill.loads(f_dumped) 153 assert f2.mode == f1mode 154 if fmode == dill.CONTENTS_FMODE:
154 if fmode == dill.CONTENTS_FMODE: 155 assert f2.tell() == _flen 156 assert f2.read() == ""
155 assert f2.tell() == _flen 156 assert f2.read() == "" 157 f2.seek(0)
157 f2.seek(0) 158 assert f2.read() == _fstr 159 assert f2.tell() == _flen # 150
158 assert f2.read() == _fstr 159 assert f2.tell() == _flen # 150 160 elif fmode == dill.HANDLE_FMODE:
160 elif fmode == dill.HANDLE_FMODE: 161 assert f2.tell() == 0 162 assert f2.read() == _fstr
161 assert f2.tell() == 0 162 assert f2.read() == _fstr 163 assert f2.tell() == _flen # 150
162 assert f2.read() == _fstr 163 assert f2.tell() == _flen # 150 164 elif fmode == dill.FILE_FMODE:
164 elif fmode == dill.FILE_FMODE: 165 assert f2.tell() == ftell # 200 166 assert f2.read() == ""
165 assert f2.tell() == ftell # 200 166 assert f2.read() == "" 167 f2.seek(0)
167 f2.seek(0) 168 assert f2.read() == fstr 169 assert f2.tell() == ftell # 200
168 assert f2.read() == fstr 169 assert f2.tell() == ftell # 200 170 else:
191 if strictio: # throw error if ftell > EOF 192 assert throws(dill.loads, (f_dumped,), buffer_error) 193 else:
193 else: 194 f2 = dill.loads(f_dumped) 195 f2mode = f2.mode
199 if fmode == dill.CONTENTS_FMODE: 200 assert open(fname).read() == "h world!" 201 assert f2mode == f1mode
200 assert open(fname).read() == "h world!" 201 assert f2mode == f1mode 202 assert f2tell == _ftell
201 assert f2mode == f1mode 202 assert f2tell == _ftell 203 elif fmode == dill.HANDLE_FMODE:
203 elif fmode == dill.HANDLE_FMODE: 204 assert open(fname).read() == " world!" 205 assert f2mode == f1mode
204 assert open(fname).read() == " world!" 205 assert f2mode == f1mode 206 assert f2tell == 0
205 assert f2mode == f1mode 206 assert f2tell == 0 207 elif fmode == dill.FILE_FMODE:
207 elif fmode == dill.FILE_FMODE: 208 assert open(fname).read() == "hello world!" 209 assert f2mode == f1mode
208 assert open(fname).read() == "hello world!" 209 assert f2mode == f1mode 210 assert f2tell == ftell
209 assert f2mode == f1mode 210 assert f2tell == ftell 211 else:
232 if strictio: # throw error if ftell > EOF 233 assert throws(dill.loads, (f_dumped,), buffer_error) 234 else:
234 else: 235 f2 = dill.loads(f_dumped) 236 f2mode = f2.mode
239 f2.close() 240 assert f2mode == f1mode 241 if fmode == dill.CONTENTS_FMODE:
242 # position of writes cannot be changed on some OSs 243 assert open(fname).read() == "h world!" 244 assert f2tell == _ftell
243 assert open(fname).read() == "h world!" 244 assert f2tell == _ftell 245 elif fmode == dill.HANDLE_FMODE:
245 elif fmode == dill.HANDLE_FMODE: 246 assert open(fname).read() == "h world!" 247 assert f2tell == _ftell
246 assert open(fname).read() == "h world!" 247 assert f2tell == _ftell 248 elif fmode == dill.FILE_FMODE:
248 elif fmode == dill.FILE_FMODE: 249 assert open(fname).read() == "hello world!" 250 assert f2tell == ftell
249 assert open(fname).read() == "hello world!" 250 assert f2tell == ftell 251 else:
269 if strictio: # throw error if file DNE 270 assert throws(dill.loads, (f_dumped,), dne_error) 271 else:
271 else: 272 f2 = dill.loads(f_dumped) 273 assert f2.mode == f1mode
272 f2 = dill.loads(f_dumped) 273 assert f2.mode == f1mode 274 if fmode == dill.CONTENTS_FMODE:
276 # assert f2.tell() == ftell # 200 277 assert f2.read() == "" 278 f2.seek(0)
278 f2.seek(0) 279 assert f2.read() == "" 280 assert f2.tell() == 0
279 assert f2.read() == "" 280 assert f2.tell() == 0 281 elif fmode == dill.FILE_FMODE:
281 elif fmode == dill.FILE_FMODE: 282 assert f2.tell() == ftell # 200 283 assert f2.read() == ""
282 assert f2.tell() == ftell # 200 283 assert f2.read() == "" 284 f2.seek(0)
284 f2.seek(0) 285 assert f2.read() == fstr 286 assert f2.tell() == ftell # 200
285 assert f2.read() == fstr 286 assert f2.tell() == ftell # 200 287 elif fmode == dill.HANDLE_FMODE:
287 elif fmode == dill.HANDLE_FMODE: 288 assert f2.tell() == 0 289 assert f2.read() == ""
288 assert f2.tell() == 0 289 assert f2.read() == "" 290 assert f2.tell() == 0
289 assert f2.read() == "" 290 assert f2.tell() == 0 291 else:
308 if strictio: # throw error if file DNE 309 assert throws(dill.loads, (f_dumped,), dne_error) 310 else:
310 else: 311 f2 = dill.loads(f_dumped) 312 f2mode = f2.mode
316 if fmode == dill.CONTENTS_FMODE: 317 assert open(fname).read() == " world!" 318 assert f2mode == 'w+'
317 assert open(fname).read() == " world!" 318 assert f2mode == 'w+' 319 assert f2tell == 0
318 assert f2mode == 'w+' 319 assert f2tell == 0 320 elif fmode == dill.HANDLE_FMODE:
320 elif fmode == dill.HANDLE_FMODE: 321 assert open(fname).read() == " world!" 322 assert f2mode == f1mode
321 assert open(fname).read() == " world!" 322 assert f2mode == f1mode 323 assert f2tell == 0
322 assert f2mode == f1mode 323 assert f2tell == 0 324 elif fmode == dill.FILE_FMODE:
324 elif fmode == dill.FILE_FMODE: 325 assert open(fname).read() == "hello world!" 326 assert f2mode == f1mode
325 assert open(fname).read() == "hello world!" 326 assert f2mode == f1mode 327 assert f2tell == ftell
326 assert f2mode == f1mode 327 assert f2tell == ftell 328 else:
344 if strictio: # throw error if file DNE 345 assert throws(dill.loads, (f_dumped,), dne_error) 346 else:
346 else: 347 f2 = dill.loads(f_dumped) 348 f2mode = f2.mode
351 f2.close() 352 assert f2mode == f1mode 353 if fmode == dill.CONTENTS_FMODE:
353 if fmode == dill.CONTENTS_FMODE: 354 assert open(fname).read() == " world!" 355 assert f2tell == 0
354 assert open(fname).read() == " world!" 355 assert f2tell == 0 356 elif fmode == dill.HANDLE_FMODE:
356 elif fmode == dill.HANDLE_FMODE: 357 assert open(fname).read() == " world!" 358 assert f2tell == 0
357 assert open(fname).read() == " world!" 358 assert f2tell == 0 359 elif fmode == dill.FILE_FMODE:
359 elif fmode == dill.FILE_FMODE: 360 assert open(fname).read() == "hello world!" 361 assert f2tell == ftell
360 assert open(fname).read() == "hello world!" 361 assert f2tell == ftell 362 else:
380 381 f2 = dill.loads(f_dumped) 382 assert f2.mode == f1mode
381 f2 = dill.loads(f_dumped) 382 assert f2.mode == f1mode 383 if fmode == dill.CONTENTS_FMODE:
383 if fmode == dill.CONTENTS_FMODE: 384 assert f2.tell() == ftell # 200 385 assert f2.read() == _fstr[ftell:]
384 assert f2.tell() == ftell # 200 385 assert f2.read() == _fstr[ftell:] 386 f2.seek(0)
386 f2.seek(0) 387 assert f2.read() == _fstr 388 assert f2.tell() == _flen # 250
387 assert f2.read() == _fstr 388 assert f2.tell() == _flen # 250 389 elif fmode == dill.HANDLE_FMODE:
389 elif fmode == dill.HANDLE_FMODE: 390 assert f2.tell() == 0 391 assert f2.read() == _fstr
390 assert f2.tell() == 0 391 assert f2.read() == _fstr 392 assert f2.tell() == _flen # 250
391 assert f2.read() == _fstr 392 assert f2.tell() == _flen # 250 393 elif fmode == dill.FILE_FMODE:
393 elif fmode == dill.FILE_FMODE: 394 assert f2.tell() == ftell # 200 395 assert f2.read() == ""
394 assert f2.tell() == ftell # 200 395 assert f2.read() == "" 396 f2.seek(0)
396 f2.seek(0) 397 assert f2.read() == fstr 398 assert f2.tell() == ftell # 200
397 assert f2.read() == fstr 398 assert f2.tell() == ftell # 200 399 else:
418 419 f2 = dill.loads(f_dumped) 420 f2mode = f2.mode
424 if fmode == dill.CONTENTS_FMODE: 425 assert open(fname).read() == "hello world!odbye!" 426 assert f2mode == f1mode
425 assert open(fname).read() == "hello world!odbye!" 426 assert f2mode == f1mode 427 assert f2tell == ftell
426 assert f2mode == f1mode 427 assert f2tell == ftell 428 elif fmode == dill.HANDLE_FMODE:
428 elif fmode == dill.HANDLE_FMODE: 429 assert open(fname).read() == " world!" 430 assert f2mode == f1mode
429 assert open(fname).read() == " world!" 430 assert f2mode == f1mode 431 assert f2tell == 0
430 assert f2mode == f1mode 431 assert f2tell == 0 432 elif fmode == dill.FILE_FMODE:
432 elif fmode == dill.FILE_FMODE: 433 assert open(fname).read() == "hello world!" 434 assert f2mode == f1mode
433 assert open(fname).read() == "hello world!" 434 assert f2mode == f1mode 435 assert f2tell == ftell
434 assert f2mode == f1mode 435 assert f2tell == ftell 436 else:
456 457 f2 = dill.loads(f_dumped) 458 f2mode = f2.mode
461 f2.close() 462 assert f2mode == f1mode 463 if fmode == dill.CONTENTS_FMODE:
463 if fmode == dill.CONTENTS_FMODE: 464 assert open(fname).read() == "hello and goodbye! world!" 465 assert f2tell == ftell
464 assert open(fname).read() == "hello and goodbye! world!" 465 assert f2tell == ftell 466 elif fmode == dill.HANDLE_FMODE:
466 elif fmode == dill.HANDLE_FMODE: 467 assert open(fname).read() == "hello and goodbye! world!" 468 assert f2tell == _ftell
467 assert open(fname).read() == "hello and goodbye! world!" 468 assert f2tell == _ftell 469 elif fmode == dill.FILE_FMODE:
469 elif fmode == dill.FILE_FMODE: 470 assert open(fname).read() == "hello world!" 471 assert f2tell == ftell
470 assert open(fname).read() == "hello world!" 471 assert f2tell == ftell 472 else:
8 import functools 9 import dill 10 import sys
32
33 exec('''
34 def function_e(e, *e1, e2=1, e3=2):
35 return e + sum(e1) + e2 + e3''')
36
60 61 import dill, pickletools 62 f = Foo()
63 f1 = dill.copy(f) 64 assert f1.f2() is f1 65
68 dumped_func_a = dill.dumps(function_a) 69 assert dill.loads(dumped_func_a)(0) == 0 70
68 dumped_func_a = dill.dumps(function_a) 69 assert dill.loads(dumped_func_a)(0) == 0 70
71 dumped_func_b = dill.dumps(function_b) 72 assert dill.loads(dumped_func_b)(1,2) == 3 73
71 dumped_func_b = dill.dumps(function_b) 72 assert dill.loads(dumped_func_b)(1,2) == 3 73
74 dumped_func_c = dill.dumps(function_c) 75 assert dill.loads(dumped_func_c)(1) == 2 76 assert dill.loads(dumped_func_c)(1, 2) == 3
74 dumped_func_c = dill.dumps(function_c) 75 assert dill.loads(dumped_func_c)(1) == 2 76 assert dill.loads(dumped_func_c)(1, 2) == 3
75 assert dill.loads(dumped_func_c)(1) == 2 76 assert dill.loads(dumped_func_c)(1, 2) == 3 77
75 assert dill.loads(dumped_func_c)(1) == 2 76 assert dill.loads(dumped_func_c)(1, 2) == 3 77
78 dumped_func_d = dill.dumps(function_d) 79 assert dill.loads(dumped_func_d).__doc__ == function_d.__doc__ 80 assert dill.loads(dumped_func_d).__module__ == function_d.__module__
78 dumped_func_d = dill.dumps(function_d) 79 assert dill.loads(dumped_func_d).__doc__ == function_d.__doc__ 80 assert dill.loads(dumped_func_d).__module__ == function_d.__module__
79 assert dill.loads(dumped_func_d).__doc__ == function_d.__doc__ 80 assert dill.loads(dumped_func_d).__module__ == function_d.__module__ 81 assert dill.loads(dumped_func_d)(1, 2) == 4
79 assert dill.loads(dumped_func_d).__doc__ == function_d.__doc__ 80 assert dill.loads(dumped_func_d).__module__ == function_d.__module__ 81 assert dill.loads(dumped_func_d)(1, 2) == 4
80 assert dill.loads(dumped_func_d).__module__ == function_d.__module__ 81 assert dill.loads(dumped_func_d)(1, 2) == 4 82 assert dill.loads(dumped_func_d)(1, 2, 3) == 6
80 assert dill.loads(dumped_func_d).__module__ == function_d.__module__ 81 assert dill.loads(dumped_func_d)(1, 2) == 4 82 assert dill.loads(dumped_func_d)(1, 2, 3) == 6
81 assert dill.loads(dumped_func_d)(1, 2) == 4 82 assert dill.loads(dumped_func_d)(1, 2, 3) == 6 83 assert dill.loads(dumped_func_d)(1, 2, d2=3) == 6
81 assert dill.loads(dumped_func_d)(1, 2) == 4 82 assert dill.loads(dumped_func_d)(1, 2, 3) == 6 83 assert dill.loads(dumped_func_d)(1, 2, d2=3) == 6
82 assert dill.loads(dumped_func_d)(1, 2, 3) == 6 83 assert dill.loads(dumped_func_d)(1, 2, d2=3) == 6 84
82 assert dill.loads(dumped_func_d)(1, 2, 3) == 6 83 assert dill.loads(dumped_func_d)(1, 2, d2=3) == 6 84
87 dumped_func_cache = dill.dumps(function_with_cache) 88 assert function_with_cache(2) == 3 89 assert function_with_cache(1) == 1
88 assert function_with_cache(2) == 3 89 assert function_with_cache(1) == 1 90 assert function_with_cache(3) == 6
89 assert function_with_cache(1) == 1 90 assert function_with_cache(3) == 6 91 assert function_with_cache(2) == 3
90 assert function_with_cache(3) == 6 91 assert function_with_cache(2) == 3 92
93 empty_cell = function_with_unassigned_variable() 94 cell_copy = dill.loads(dill.dumps(empty_cell)) 95 assert 'empty' in str(cell_copy.__closure__[0])
94 cell_copy = dill.loads(dill.dumps(empty_cell)) 95 assert 'empty' in str(cell_copy.__closure__[0]) 96 try:
97 cell_copy() 98 except Exception: 99 # this is good 100 pass 101 else:
103
104 exec('''
105 dumped_func_e = dill.dumps(function_e)
106 assert dill.loads(dumped_func_e)(1, 2) == 6
107 assert dill.loads(dumped_func_e)(1, 2, 3) == 9
108 assert dill.loads(dumped_func_e)(1, 2, e2=3) == 8
109 assert dill.loads(dumped_func_e)(1, 2, e2=3, e3=4) == 10
110 assert dill.loads(dumped_func_e)(1, 2, 3, e2=4) == 12
111 assert dill.loads(dumped_func_e)(1, 2, 3, e2=4, e3=5) == 15''')
112
114 import warnings 115 from dill._dill import ALL_CODE_PARAMS, CODE_PARAMS, CODE_VERSION, _create_code 116 code = function_c.__code__
9 import functools 10 import dill 11 dill.settings['recurse'] = True
31 32 assert dill.pickles(fp, safe=True) 33 assert dill.pickles(gp, safe=True)
32 assert dill.pickles(fp, safe=True) 33 assert dill.pickles(gp, safe=True) 34 assert dill.pickles(hp, safe=True)
33 assert dill.pickles(gp, safe=True) 34 assert dill.pickles(hp, safe=True) 35 assert dill.pickles(bp, safe=True)
34 assert dill.pickles(hp, safe=True) 35 assert dill.pickles(bp, safe=True) 36
11 12 import dill 13 from dill import detect
12 import dill 13 from dill import detect 14 from dill.logger import stderr_handler, adapter as logger
13 from dill import detect 14 from dill.logger import stderr_handler, adapter as logger 15
33 for line in buffer.getvalue().splitlines(): 34 assert regex.fullmatch(line) 35 return buffer.getvalue()
36 else: 37 assert buffer.getvalue() == "" 38 finally:
53 file_trace, stream_trace = regdict.sub(r'\1{}>', file_trace), regdict.sub(r'\1{}>', stream_trace)
54 assert file_trace == stream_trace
55
68 test_logging(should_trace=False) 69 assert logger.getEffectiveLevel() == loglevel 70 test_trace_to_file(stream_trace)
8 9 import dill 10 dill.settings['recurse'] = True
78 # test mixins 79 assert double_add(1,2,3) == 2*fx 80 double_add.invert()
80 double_add.invert() 81 assert double_add(1,2,3) == -2*fx 82
83 _d = dill.copy(double_add) 84 assert _d(1,2,3) == -2*fx 85 #_d.invert() #FIXME: fails seemingly randomly
87 88 assert _d.__wrapped__(1,2,3) == fx 89
96 dd = dill.detect 97 assert ds.getsource(dd.freevars(quadish)['f']) == '@quad_factory(a=0,b=4,c=0)\ndef quadish(x):\n return x+1\n' 98 assert ds.getsource(dd.freevars(quadruple)['f']) == '@doubler\ndef quadruple(x):\n return 2*x\n'
97 assert ds.getsource(dd.freevars(quadish)['f']) == '@quad_factory(a=0,b=4,c=0)\ndef quadish(x):\n return x+1\n' 98 assert ds.getsource(dd.freevars(quadruple)['f']) == '@doubler\ndef quadruple(x):\n return 2*x\n' 99 assert ds.importable(quadish, source=False) == 'from %s import quadish\n' % __name__
98 assert ds.getsource(dd.freevars(quadruple)['f']) == '@doubler\ndef quadruple(x):\n return 2*x\n' 99 assert ds.importable(quadish, source=False) == 'from %s import quadish\n' % __name__ 100 assert ds.importable(quadruple, source=False) == 'from %s import quadruple\n' % __name__
99 assert ds.importable(quadish, source=False) == 'from %s import quadish\n' % __name__ 100 assert ds.importable(quadruple, source=False) == 'from %s import quadruple\n' % __name__ 101 assert ds.importable(quadratic, source=False) == 'from %s import quadratic\n' % __name__
100 assert ds.importable(quadruple, source=False) == 'from %s import quadruple\n' % __name__ 101 assert ds.importable(quadratic, source=False) == 'from %s import quadratic\n' % __name__ 102 assert ds.importable(double_add, source=False) == 'from %s import double_add\n' % __name__
101 assert ds.importable(quadratic, source=False) == 'from %s import quadratic\n' % __name__ 102 assert ds.importable(double_add, source=False) == 'from %s import double_add\n' % __name__ 103 assert ds.importable(quadruple, source=True) == 'def doubler(f):\n def inner(*args, **kwds):\n fx = f(*args, **kwds)\n return 2*fx\n return inner\n\n@doubler\ndef quadruple(x):\n return 2*x\n'
102 assert ds.importable(double_add, source=False) == 'from %s import double_add\n' % __name__ 103 assert ds.importable(quadruple, source=True) == 'def doubler(f):\n def inner(*args, **kwds):\n fx = f(*args, **kwds)\n return 2*fx\n return inner\n\n@doubler\ndef quadruple(x):\n return 2*x\n' 104 #***** #FIXME: this needs work
106 a,b,c,_,result = result.split('\n',4)
107 assert result == 'def quad_factory(a=1,b=1,c=0):\n def dec(f):\n def func(*args,**kwds):\n fx = f(*args,**kwds)\n return a*fx**2 + b*fx + c\n return func\n return dec\n\n@quad_factory(a=0,b=4,c=0)\ndef quadish(x):\n return x+1\n'
108 assert set([a,b,c]) == set(['a = 0', 'c = 0', 'b = 4'])
107 assert result == 'def quad_factory(a=1,b=1,c=0):\n def dec(f):\n def func(*args,**kwds):\n fx = f(*args,**kwds)\n return a*fx**2 + b*fx + c\n return func\n return dec\n\n@quad_factory(a=0,b=4,c=0)\ndef quadish(x):\n return x+1\n' 108 assert set([a,b,c]) == set(['a = 0', 'c = 0', 'b = 4']) 109 result = ds.importable(quadratic, source=True)
110 a,b,c,result = result.split('\n',3)
111 assert result == '\ndef dec(f):\n def func(*args,**kwds):\n fx = f(*args,**kwds)\n return a*fx**2 + b*fx + c\n return func\n'
112 assert set([a,b,c]) == set(['a = 1', 'c = 0', 'b = 1'])
111 assert result == '\ndef dec(f):\n def func(*args,**kwds):\n fx = f(*args,**kwds)\n return a*fx**2 + b*fx + c\n return func\n' 112 assert set([a,b,c]) == set(['a = 1', 'c = 0', 'b = 1']) 113 result = ds.importable(double_add, source=True)
114 a,b,c,d,_,result = result.split('\n',5)
115 assert result == 'def quad(a=1, b=1, c=0):\n inverted = [False]\n def invert():\n inverted[0] = not inverted[0]\n def dec(f):\n def func(*args, **kwds):\n x = f(*args, **kwds)\n if inverted[0]: x = -x\n return a*x**2 + b*x + c\n func.__wrapped__ = f\n func.invert = invert\n func.inverted = inverted\n return func\n return dec\n\n@quad(a=0,b=2)\ndef double_add(*args):\n return sum(args)\n'
116 assert set([a,b,c,d]) == set(['a = 0', 'c = 0', 'b = 2', 'inverted = [True]'])
115 assert result == 'def quad(a=1, b=1, c=0):\n inverted = [False]\n def invert():\n inverted[0] = not inverted[0]\n def dec(f):\n def func(*args, **kwds):\n x = f(*args, **kwds)\n if inverted[0]: x = -x\n return a*x**2 + b*x + c\n func.__wrapped__ = f\n func.invert = invert\n func.inverted = inverted\n return func\n return dec\n\n@quad(a=0,b=2)\ndef double_add(*args):\n return sum(args)\n' 116 assert set([a,b,c,d]) == set(['a = 0', 'c = 0', 'b = 2', 'inverted = [True]']) 117 #*****
9 import sys 10 import dill 11 import test_mixins as module
27 28 module = dill.loads(pik_mod) 29 def test_attributes():
30 #assert hasattr(module, "a") and module.a == 1234 #FIXME: -m dill.tests 31 assert module.double_add(1, 2, 3) == 2 * module.fx 32
49 50 module = dill.loads(pik_mod) 51 def test_diff_attributes():
51 def test_diff_attributes(): 52 assert hasattr(module, "a") and module.a == 1234 53 assert module.double_add(1, 2, 3) == 2 * module.fx
52 assert hasattr(module, "a") and module.a == 1234 53 assert module.double_add(1, 2, 3) == 2 * module.fx 54
71 def get_lambda(str, **kwarg): 72 return eval(str, kwarg, None) 73
76 def test_module_is_none(): 77 assert obj.__module__ is None 78 assert dill.copy(obj)(3) == obj(3)
77 assert obj.__module__ is None 78 assert dill.copy(obj)(3) == obj(3) 79
8 9 import dill 10 dill.settings['recurse'] = True
21 def test_decorated(): 22 assert dill.pickles(f2) 23
14 15 import dill as pickle 16 pickle.settings['recurse'] = True
66 # ... 67 la = pickle.loads(pa) 68 lmath = pickle.loads(pmath)
67 la = pickle.loads(pa) 68 lmath = pickle.loads(pmath) 69 lmap = pickle.loads(pmap)
68 lmath = pickle.loads(pmath) 69 lmap = pickle.loads(pmap) 70 assert list(map(math.sin, a)) == list(lmap(lmath.sin, la))
69 lmap = pickle.loads(pmap) 70 assert list(map(math.sin, a)) == list(lmap(lmath.sin, la)) 71
74 pbasic2 = pickle.dumps(basic2) 75 _pbasic2 = pickle.loads(pbasic2)() 76 pbasic = pickle.dumps(basic)
76 pbasic = pickle.dumps(basic) 77 _pbasic = pickle.loads(pbasic)() 78
81 pc2adder = pickle.dumps(c2adder) 82 pc2add5 = pickle.loads(pc2adder)(x) 83 assert pc2add5(y) == x+y
82 pc2add5 = pickle.loads(pc2adder)(x) 83 assert pc2add5(y) == x+y 84
87 pcadder = pickle.dumps(cadder) 88 pcadd5 = pickle.loads(pcadder)(x) 89 assert pcadd5(y) == x+y
88 pcadd5 = pickle.loads(pcadder)(x) 89 assert pcadd5(y) == x+y 90
93 add5 = adder(x) 94 assert add5(y) == x+y 95
98 padder = pickle.dumps(adder) 99 padd5 = pickle.loads(padder)(x) 100 assert padd5(y) == x+y
99 padd5 = pickle.loads(padder)(x) 100 assert padd5(y) == x+y 101
105 pinner = pickle.dumps(add5) #XXX: FAILS in pickle 106 p5add = pickle.loads(pinner) 107 assert p5add(y) == x+y
106 p5add = pickle.loads(pinner) 107 assert p5add(y) == x+y 108
12 13 import dill as pickle 14 pickle.settings['recurse'] = True
18 # get all objects for testing 19 from dill import load_types, objects, extend 20 load_types(pickleable=True,unpickleable=False)
41 try: 42 pik = pickle.loads(pickle.dumps(obj)) 43 if exact:
44 try: 45 assert pik == obj 46 except AssertionError:
46 except AssertionError:
47 assert type(obj) == type(pik)
48 if verbose: print ("weak: %s %s" % (name, type(obj)))
49 else: 50 assert type(obj) == type(pik) 51 except Exception:
10 11 import dill 12 dill.settings['recurse'] = True
29 FooS = dill.copy(Foo) 30 assert FooS.data.fget is not None 31 assert FooS.data.fset is not None
30 assert FooS.data.fget is not None 31 assert FooS.data.fset is not None 32 assert FooS.data.fdel is None
31 assert FooS.data.fset is not None 32 assert FooS.data.fdel is None 33
42 else: 43 assert res == 1 44
55 else: 56 assert res == 1024 57
11 12 import dill 13 import warnings
7 8 import dill 9 from functools import partial
16 return dill.copy(obj, byref=byref, recurse=recurse) 17 except Exception: 18 pass 19 else:
44 def test_super(): 45 assert copy(obj1(), byref=True) 46 assert copy(obj1(), byref=True, recurse=True)
45 assert copy(obj1(), byref=True) 46 assert copy(obj1(), byref=True, recurse=True) 47 assert copy(obj1(), recurse=True)
46 assert copy(obj1(), byref=True, recurse=True) 47 assert copy(obj1(), recurse=True) 48 assert copy(obj1())
47 assert copy(obj1(), recurse=True) 48 assert copy(obj1()) 49
49 50 assert copy(obj2(), byref=True) 51 assert copy(obj2(), byref=True, recurse=True)
50 assert copy(obj2(), byref=True) 51 assert copy(obj2(), byref=True, recurse=True) 52 assert copy(obj2(), recurse=True)
51 assert copy(obj2(), byref=True, recurse=True) 52 assert copy(obj2(), recurse=True) 53 assert copy(obj2())
52 assert copy(obj2(), recurse=True) 53 assert copy(obj2()) 54
54 55 assert copy(obj3(), byref=True) 56 assert copy(obj3(), byref=True, recurse=True)
55 assert copy(obj3(), byref=True) 56 assert copy(obj3(), byref=True, recurse=True) 57 assert copy(obj3(), recurse=True)
56 assert copy(obj3(), byref=True, recurse=True) 57 assert copy(obj3(), recurse=True) 58 assert copy(obj3())
57 assert copy(obj3(), recurse=True) 58 assert copy(obj3()) 59
75 def test_partial(): 76 assert copy(Machine(), byref=True) 77 assert copy(Machine(), byref=True, recurse=True)
76 assert copy(Machine(), byref=True) 77 assert copy(Machine(), byref=True, recurse=True) 78 assert copy(Machine(), recurse=True)
77 assert copy(Machine(), byref=True, recurse=True) 78 assert copy(Machine(), recurse=True) 79 assert copy(Machine())
78 assert copy(Machine(), recurse=True) 79 assert copy(Machine()) 80
94 def test_partials(): 95 assert copy(SubMachine(), byref=True) 96 assert copy(SubMachine(), byref=True, recurse=True)
95 assert copy(SubMachine(), byref=True) 96 assert copy(SubMachine(), byref=True, recurse=True) 97 assert copy(SubMachine(), recurse=True)
96 assert copy(SubMachine(), byref=True, recurse=True) 97 assert copy(SubMachine(), recurse=True) 98 assert copy(SubMachine())
97 assert copy(SubMachine(), recurse=True) 98 assert copy(SubMachine()) 99
112 def test_circular_reference(): 113 assert copy(obj4()) 114 obj4_copy = dill.loads(dill.dumps(obj4()))
113 assert copy(obj4()) 114 obj4_copy = dill.loads(dill.dumps(obj4())) 115 assert type(obj4_copy) is type(obj4_copy).__init__.__closure__[0].cell_contents
114 obj4_copy = dill.loads(dill.dumps(obj4())) 115 assert type(obj4_copy) is type(obj4_copy).__init__.__closure__[0].cell_contents 116 assert type(obj4_copy.b) is type(obj4_copy.b).__init__.__closure__[0].cell_contents
115 assert type(obj4_copy) is type(obj4_copy).__init__.__closure__[0].cell_contents 116 assert type(obj4_copy.b) is type(obj4_copy.b).__init__.__closure__[0].cell_contents 117
125 def test_function_cells(): 126 assert copy(f()) 127
129 def fib(n): 130 assert n >= 0 131 if n <= 1:
142 del fib 143 assert fib2(5) == 5 144 for _fib in (fib3, fib4):
146 _fib(5) 147 except Exception: 148 # This is expected to fail because fib no longer exists 149 pass 150 else:
164 g = copy(collection_function_recursion()) 165 assert g()['g'] is g 166
10 11 import dill 12 from dill._objects import failures, registered, succeeds
11 import dill 12 from dill._objects import failures, registered, succeeds 13 import warnings
27 try: 28 assert not bool(fails) 29 except AssertionError as e:
34 try: 35 assert not bool(register) 36 except AssertionError as e:
41 try: 42 assert not bool(success) 43 except AssertionError as e:
53 try: 54 assert not bool(diff) 55 except AssertionError as e:
60 try: 61 assert not bool(miss) 62 except AssertionError as e:
8 9 import dill 10
22 def test_function_with_restricted_object(): 23 deserialized = dill.loads(dill.dumps(restricted_func, recurse=True)) 24
11 12 import dill 13 dill.settings['recurse'] = True
24 if verbose: print ("%s: %s, %s" % (ok, type(j), j))
25 assert ok
26 if verbose: print ("")
40 41 from dill import objects 42 from dill import load_types
41 from dill import objects 42 from dill import load_types 43 load_types(pickleable=True,unpickleable=False)
60 if verbose: print ("%s: %s, %s" % (ok, type(i), i))
61 assert ok
62 if verbose: print ("")
66 if verbose: print ("%s: %s, %s" % (ok, type(i), i))
67 assert ok
68 if verbose: print ("")
75 if verbose: print ("%s: %s, %s" % (ok, type(o), o))
76 assert ok
77 ok = dill.pickles(oo)
78 if verbose: print ("%s: %s, %s" % (ok, type(oo), oo))
79 assert ok
80 if verbose: print ("")
89 if verbose: print ("%s: %s, %s" % (ok, type(f), f))
90 assert not ok
91 ok = dill.pickles(g)
92 if verbose: print ("%s: %s, %s" % (ok, type(g), g))
93 assert _is(not ok) #XXX: dill fails
94 ok = dill.pickles(t)
95 if verbose: print ("%s: %s, %s" % (ok, type(t), t))
96 assert not ok #XXX: dill fails
97 ok = dill.pickles(e)
98 if verbose: print ("%s: %s, %s" % (ok, type(e), e))
99 assert ok
100 if verbose: print ("")
104 x = typing.Any 105 assert x == dill.copy(x) 106 x = typing.Dict[int, str]
106 x = typing.Dict[int, str] 107 assert x == dill.copy(x) 108 x = typing.List[int]
108 x = typing.List[int] 109 assert x == dill.copy(x) 110 x = typing.Tuple[int, str]
110 x = typing.Tuple[int, str] 111 assert x == dill.copy(x) 112 x = typing.Tuple[int]
112 x = typing.Tuple[int] 113 assert x == dill.copy(x) 114 x = typing.Tuple[()]
114 x = typing.Tuple[()] 115 assert x == dill.copy(x) 116 x = typing.Tuple[()].copy_with(())
116 x = typing.Tuple[()].copy_with(()) 117 assert x == dill.copy(x) 118 return
14 15 import dill 16
41 for obj in ('json', 'url', 'local_mod', 'sax', 'dom'):
42 assert globals()[obj].__name__ in sys.modules
43 assert 'calendar' in sys.modules and 'cmath' in sys.modules
42 assert globals()[obj].__name__ in sys.modules 43 assert 'calendar' in sys.modules and 'cmath' in sys.modules 44 import calendar, cmath
46 for obj in ('Calendar', 'isleap'):
47 assert globals()[obj] is sys.modules['calendar'].__dict__[obj]
48 assert __main__.day_name.__module__ == 'calendar'
47 assert globals()[obj] is sys.modules['calendar'].__dict__[obj] 48 assert __main__.day_name.__module__ == 'calendar' 49 if refimported:
49 if refimported: 50 assert __main__.day_name is calendar.day_name 51
51 52 assert __main__.complex_log is cmath.log 53
69 import urllib as url # top-level module under alias 70 from xml import sax # submodule 71 import xml.dom.minidom as dom # submodule under alias
70 from xml import sax # submodule 71 import xml.dom.minidom as dom # submodule under alias 72 import test_dictviews as local_mod # non-builtin top-level module
125 for obj in ('json', 'url', 'local_mod', 'sax', 'dom'):
126 assert globals()[obj].__name__ == globals_copy[obj].__name__
127
128 for obj in ('x', 'empty', 'names'):
129 assert main_dict[obj] == globals_copy[obj]
130
131 for obj in ['squared', 'cubed']: 132 assert main_dict[obj].__globals__ is main_dict 133 assert main_dict[obj](3) == globals_copy[obj](3)
132 assert main_dict[obj].__globals__ is main_dict 133 assert main_dict[obj](3) == globals_copy[obj](3) 134
134 135 assert Person.__module__ == __main__.__name__ 136 assert isinstance(person, Person)
135 assert Person.__module__ == __main__.__name__ 136 assert isinstance(person, Person) 137 assert person.age == globals_copy['person'].age
136 assert isinstance(person, Person) 137 assert person.age == globals_copy['person'].age 138
138 139 assert issubclass(CalendarSubclass, Calendar) 140 assert isinstance(cal, CalendarSubclass)
139 assert issubclass(CalendarSubclass, Calendar) 140 assert isinstance(cal, CalendarSubclass) 141 assert cal.weekdays() == globals_copy['cal'].weekdays()
140 assert isinstance(cal, CalendarSubclass) 141 assert cal.weekdays() == globals_copy['cal'].weekdays() 142
142 143 assert selfref is __main__ 144
160 dill.dump_module(session_file % refimported, refimported=refimported) 161 from dill.tests.__main__ import python, shell, sp 162 error = sp.call([python, __file__, '--child', str(refimported)], shell=shell)
161 from dill.tests.__main__ import python, shell, sp 162 error = sp.call([python, __file__, '--child', str(refimported)], shell=shell) 163 if error: sys.exit(error)
190 191 assert all(obj in module.__dict__ for obj in dict_objects) 192 assert module.selfref is module
191 assert all(obj in module.__dict__ for obj in dict_objects) 192 assert module.selfref is module 193
215 return_val = dill.load_module(BytesIO(session_dump), module=runtime) 216 assert return_val is None 217 assert runtime.__name__ == modname
216 assert return_val is None 217 assert runtime.__name__ == modname 218 assert runtime.x == 42
217 assert runtime.__name__ == modname 218 assert runtime.x == 42 219 assert runtime not in sys.modules.values()
218 assert runtime.x == 42 219 assert runtime not in sys.modules.values() 220
223 runtime = dill.load_module(BytesIO(session_dump)) 224 assert runtime.__name__ == modname 225 assert runtime.x == 42
224 assert runtime.__name__ == modname 225 assert runtime.x == 42 226 assert runtime not in sys.modules.values()
225 assert runtime.x == 42 226 assert runtime not in sys.modules.values() 227
241 session_buffer.seek(0) 242 mod = dill.load(session_buffer) 243 del sys.modules['__test__']
244
245 assert set(mod.__dill_imported_as) == {
246 ('collections', 'UserDict', 'Dict'),
247 ('typing', 'AsyncContextManager', 'AsyncCM'),
248 ('dill', 'executor', 'thread_exec'),
249 }
250
263 264 assert main_vars is not globals() 265 assert globals() == globals_state
264 assert main_vars is not globals() 265 assert globals() == globals_state 266
266 267 assert main_vars['__name__'] == '__main__' 268 assert main_vars['names'] == names
267 assert main_vars['__name__'] == '__main__' 268 assert main_vars['names'] == names 269 assert main_vars['names'] is not names
268 assert main_vars['names'] == names 269 assert main_vars['names'] is not names 270 assert main_vars['x'] != x
269 assert main_vars['names'] is not names 270 assert main_vars['x'] != x 271 assert 'y' not in main_vars
270 assert main_vars['x'] != x 271 assert 'y' not in main_vars 272 assert 'empty' in main_vars
271 assert 'y' not in main_vars 272 assert 'empty' in main_vars 273
8 9 from dill.source import getsource, getname, _wrap, getimport 10 from dill.source import importable
9 from dill.source import getsource, getname, _wrap, getimport 10 from dill.source import importable 11 from dill._dill import IS_PYPY
10 from dill.source import importable 11 from dill._dill import IS_PYPY 12
def test_getsource():
    """Check getsource/getname text recovery and _wrap execution round-trips."""
    # Exact defining source text is recovered for lambdas and def-statements alike.
    assert getsource(f) == 'f = lambda x: x**2\n'
    assert getsource(g) == 'def g(x): return f(x) - x\n'
    assert getsource(h) == 'def h(x):\n def g(x): return x\n return g(x) - x\n'
    # getname reports the binding name of each fixture.
    for fixture, expected in ((f, 'f'), (g, 'g'), (h, 'h')):
        assert getname(fixture) == expected
    # _wrap yields a callable equivalent to the original function.
    assert _wrap(f)(4) == 16
    assert _wrap(g)(4) == 12
    assert _wrap(h)(4) == 0
    # Classes: names and full class-body source round-trip as well.
    assert getname(Foo) == 'Foo'
    assert getname(Bar) == 'Bar'
    assert getsource(Bar) == 'class Bar:\n pass\n'
    assert getsource(Foo) == 'class Foo(object):\n def bar(self, x):\n return x*x+x\n'
    #XXX: add getsource for _foo, _bar
def test_itself():
    """getimport can describe the import statement for itself."""
    statement = getimport(getimport)
    assert statement == 'from dill.source import getimport\n'
def test_builtin():
    """getimport output for builtins and literals, with and without builtin=True."""
    # A builtin callable is referenced by bare name unless builtin=True is given,
    # in which case an explicit 'from builtins' import is emitted.
    assert getimport(pow) == 'pow\n'
    assert getimport(pow, builtin=True) == 'from builtins import pow\n'
    # Literals are emitted verbatim regardless of the builtin flag.
    for literal in (100, True):
        assert getimport(literal) == repr(literal) + '\n'
        assert getimport(literal, builtin=True) == repr(literal) + '\n'
    # this is kinda BS... you can't import a None
    assert getimport(None) == 'None\n'
    assert getimport(None, builtin=True) == 'None\n'
75 from math import sin 76 assert getimport(sin) == 'from math import sin\n' 77
79 def test_dynamic(): 80 assert getimport(add) == 'from %s import add\n' % __name__ 81 # interactive lambdas
81 # interactive lambdas 82 assert getimport(squared) == 'from %s import squared\n' % __name__ 83
90 91 assert getimport(StringIO) == x 92 assert getimport(s) == y
91 assert getimport(StringIO) == x 92 assert getimport(s) == y 93 # interactively defined classes and class instances
93 # interactively defined classes and class instances 94 assert getimport(Foo) == 'from %s import Foo\n' % __name__ 95 assert getimport(_foo) == 'from %s import Foo\n' % __name__
94 assert getimport(Foo) == 'from %s import Foo\n' % __name__ 95 assert getimport(_foo) == 'from %s import Foo\n' % __name__ 96
def test_importable():
    """Exercise importable() across objects, literals, aliasing, and the builtin flag."""
    mod = __name__
    # source=False: locally-defined objects become from-imports; bare literals
    # are emitted verbatim.
    assert importable(add, source=False) == 'from %s import add\n' % mod
    assert importable(squared, source=False) == 'from %s import squared\n' % mod
    assert importable(Foo, source=False) == 'from %s import Foo\n' % mod
    assert importable(Foo.bar, source=False) == 'from %s import bar\n' % mod
    assert importable(_foo.bar, source=False) == 'from %s import bar\n' % mod
    assert importable(None, source=False) == 'None\n'
    assert importable(100, source=False) == '100\n'
    # source=True: the defining source text is emitted instead of an import.
    assert importable(add, source=True) == 'def add(x,y):\n return x+y\n'
    assert importable(squared, source=True) == 'squared = lambda x:x**2\n'
    assert importable(None, source=True) == 'None\n'
    assert importable(Bar, source=True) == 'class Bar:\n pass\n'
    assert importable(Foo, source=True) == 'class Foo(object):\n def bar(self, x):\n return x*x+x\n'
    assert importable(Foo.bar, source=True) == 'def bar(self, x):\n return x*x+x\n'
    assert importable(Foo.bar, source=False) == 'from %s import bar\n' % mod
    # alias=...: imports get an 'as' clause; source gets an assignment appended.
    assert importable(Foo.bar, alias='memo', source=False) == 'from %s import bar as memo\n' % mod
    assert importable(Foo, alias='memo', source=False) == 'from %s import Foo as memo\n' % mod
    assert importable(squared, alias='memo', source=False) == 'from %s import squared as memo\n' % mod
    assert importable(squared, alias='memo', source=True) == 'memo = squared = lambda x:x**2\n'
    assert importable(add, alias='memo', source=True) == 'def add(x,y):\n return x+y\n\nmemo = add\n'
    assert importable(None, alias='memo', source=True) == 'memo = None\n'
    assert importable(100, alias='memo', source=True) == 'memo = 100\n'
    # builtin=True makes no difference for objects defined in this module.
    assert importable(add, builtin=True, source=False) == 'from %s import add\n' % mod
    assert importable(squared, builtin=True, source=False) == 'from %s import squared\n' % mod
    assert importable(Foo, builtin=True, source=False) == 'from %s import Foo\n' % mod
    assert importable(Foo.bar, builtin=True, source=False) == 'from %s import bar\n' % mod
    assert importable(_foo.bar, builtin=True, source=False) == 'from %s import bar\n' % mod
    assert importable(None, builtin=True, source=False) == 'None\n'
    assert importable(100, builtin=True, source=False) == '100\n'
135 x = y([1,2,3]) 136 assert importable(x, source=False) == 'from numpy import array\narray([1, 2, 3])\n' 137 assert importable(y, source=False) == 'from %s import array\n' % y.__module__
136 assert importable(x, source=False) == 'from numpy import array\narray([1, 2, 3])\n' 137 assert importable(y, source=False) == 'from %s import array\n' % y.__module__ 138 assert importable(x, source=True) == 'from numpy import array\narray([1, 2, 3])\n'
137 assert importable(y, source=False) == 'from %s import array\n' % y.__module__ 138 assert importable(x, source=True) == 'from numpy import array\narray([1, 2, 3])\n' 139 assert importable(y, source=True) == 'from %s import array\n' % y.__module__
138 assert importable(x, source=True) == 'from numpy import array\narray([1, 2, 3])\n' 139 assert importable(y, source=True) == 'from %s import array\n' % y.__module__ 140 y = np.int64
141 x = y(0) 142 assert importable(x, source=False) == 'from numpy import int64\nint64(0)\n' 143 assert importable(y, source=False) == 'from %s import int64\n' % y.__module__
142 assert importable(x, source=False) == 'from numpy import int64\nint64(0)\n' 143 assert importable(y, source=False) == 'from %s import int64\n' % y.__module__ 144 assert importable(x, source=True) == 'from numpy import int64\nint64(0)\n'
143 assert importable(y, source=False) == 'from %s import int64\n' % y.__module__ 144 assert importable(x, source=True) == 'from numpy import int64\nint64(0)\n' 145 assert importable(y, source=True) == 'from %s import int64\n' % y.__module__
144 assert importable(x, source=True) == 'from numpy import int64\nint64(0)\n' 145 assert importable(y, source=True) == 'from %s import int64\n' % y.__module__ 146 y = np.bool_
153 else: b = 'bool_' 154 assert importable(x, source=False) == 'from numpy import %s\n%s(False)\n' % (b,b) 155 assert importable(y, source=False) == 'from %s import %s\n' % (y.__module__,b)
154 assert importable(x, source=False) == 'from numpy import %s\n%s(False)\n' % (b,b) 155 assert importable(y, source=False) == 'from %s import %s\n' % (y.__module__,b) 156 assert importable(x, source=True) == 'from numpy import %s\n%s(False)\n' % (b,b)
155 assert importable(y, source=False) == 'from %s import %s\n' % (y.__module__,b) 156 assert importable(x, source=True) == 'from numpy import %s\n%s(False)\n' % (b,b) 157 assert importable(y, source=True) == 'from %s import %s\n' % (y.__module__,b)
156 assert importable(x, source=True) == 'from numpy import %s\n%s(False)\n' % (b,b) 157 assert importable(y, source=True) == 'from %s import %s\n' % (y.__module__,b) 158 except ImportError: pass
def test_foo():
    """An instance's importable source embeds the class plus a dill.loads payload."""
    text = importable(_foo, source=True)
    assert text.startswith("import dill\nclass Foo(object):\n def bar(self, x):\n return x*x+x\ndill.loads(")
38 39 import dill.source as ds 40
def test_isfrommain():
    """isfrommain is True only for objects defined here; nothing tested is dynamic."""
    # Objects defined in this (main) module:
    for obj in (add, squared, Bar, _bar):
        assert ds.isfrommain(obj) == True
    # Objects imported from the helper modules ts and tm:
    for obj in (ts.add, ts.squared, ts.Bar, ts._bar,
                tm.quad, tm.double_add, tm.quadratic):
        assert ds.isfrommain(obj) == False
    # None of these were created at runtime (e.g. via exec), so none are dynamic.
    for obj in (add, squared, ts.add, ts.squared, tm.double_add, tm.quadratic):
        assert ds.isdynamic(obj) == False
def test_matchlambda():
    """_matchlambda pairs each lambda with its defining source line."""
    cases = (
        (f, 'f = lambda x: x**2\n'),
        (squared, 'squared = lambda x:x**2\n'),
        (ts.f, 'f = lambda x: x**2\n'),
        (ts.squared, 'squared = lambda x:x**2\n'),
    )
    for fn, line in cases:
        assert ds._matchlambda(fn, line)
70 lines, lineno = ds.findsource(add) 71 assert lines[lineno] == 'def add(x,y):\n' 72 lines, lineno = ds.findsource(ts.add)
72 lines, lineno = ds.findsource(ts.add) 73 assert lines[lineno] == 'def add(x,y):\n' 74 lines, lineno = ds.findsource(squared)
74 lines, lineno = ds.findsource(squared) 75 assert lines[lineno] == 'squared = lambda x:x**2\n' 76 lines, lineno = ds.findsource(ts.squared)
76 lines, lineno = ds.findsource(ts.squared) 77 assert lines[lineno] == 'squared = lambda x:x**2\n' 78 lines, lineno = ds.findsource(Bar)
78 lines, lineno = ds.findsource(Bar) 79 assert lines[lineno] == 'class Bar:\n' 80 lines, lineno = ds.findsource(ts.Bar)
80 lines, lineno = ds.findsource(ts.Bar) 81 assert lines[lineno] == 'class Bar:\n' 82 lines, lineno = ds.findsource(_bar)
82 lines, lineno = ds.findsource(_bar) 83 assert lines[lineno] == 'class Bar:\n' 84 lines, lineno = ds.findsource(ts._bar)
84 lines, lineno = ds.findsource(ts._bar) 85 assert lines[lineno] == 'class Bar:\n' 86 lines, lineno = ds.findsource(tm.quad)
86 lines, lineno = ds.findsource(tm.quad) 87 assert lines[lineno] == 'def quad(a=1, b=1, c=0):\n' 88 lines, lineno = ds.findsource(tm.double_add)
88 lines, lineno = ds.findsource(tm.double_add) 89 assert lines[lineno] == ' def func(*args, **kwds):\n' 90 lines, lineno = ds.findsource(tm.quadratic)
90 lines, lineno = ds.findsource(tm.quadratic) 91 assert lines[lineno] == ' def dec(f):\n' 92
def test_getsourcelines():
    """getsourcelines recovers full (and optionally enclosing/lstripped) source."""
    def text(obj, **kwds):
        # Join the returned line list into one source string.
        return ''.join(ds.getsourcelines(obj, **kwds)[0])
    # Functions and lambdas, local and imported, yield identical source text.
    assert text(add) == 'def add(x,y):\n return x+y\n'
    assert text(ts.add) == 'def add(x,y):\n return x+y\n'
    assert text(squared) == 'squared = lambda x:x**2\n'
    assert text(ts.squared) == 'squared = lambda x:x**2\n'
    # Classes and their instances resolve to the class body.
    assert text(Bar) == 'class Bar:\n pass\n'
    assert text(ts.Bar) == 'class Bar:\n pass\n'
    assert text(_bar) == 'class Bar:\n pass\n' #XXX: ?
    assert text(ts._bar) == 'class Bar:\n pass\n' #XXX: ?
    # A factory function's full body, including its nested defs:
    assert text(tm.quad) == 'def quad(a=1, b=1, c=0):\n inverted = [False]\n def invert():\n inverted[0] = not inverted[0]\n def dec(f):\n def func(*args, **kwds):\n x = f(*args, **kwds)\n if inverted[0]: x = -x\n return a*x**2 + b*x + c\n func.__wrapped__ = f\n func.invert = invert\n func.inverted = inverted\n return func\n return dec\n'
    # Nested functions: plain, lstripped, and with the enclosing scope included.
    assert text(tm.quadratic) == ' def dec(f):\n def func(*args,**kwds):\n fx = f(*args,**kwds)\n return a*fx**2 + b*fx + c\n return func\n'
    assert text(tm.quadratic, lstrip=True) == 'def dec(f):\n def func(*args,**kwds):\n fx = f(*args,**kwds)\n return a*fx**2 + b*fx + c\n return func\n'
    assert text(tm.quadratic, enclosing=True) == 'def quad_factory(a=1,b=1,c=0):\n def dec(f):\n def func(*args,**kwds):\n fx = f(*args,**kwds)\n return a*fx**2 + b*fx + c\n return func\n return dec\n'
    assert text(tm.double_add) == ' def func(*args, **kwds):\n x = f(*args, **kwds)\n if inverted[0]: x = -x\n return a*x**2 + b*x + c\n'
    assert text(tm.double_add, enclosing=True) == 'def quad(a=1, b=1, c=0):\n inverted = [False]\n def invert():\n inverted[0] = not inverted[0]\n def dec(f):\n def func(*args, **kwds):\n x = f(*args, **kwds)\n if inverted[0]: x = -x\n return a*x**2 + b*x + c\n func.__wrapped__ = f\n func.invert = invert\n func.inverted = inverted\n return func\n return dec\n'
def test_indent():
    """outdent and indent are inverses over getsourcelines output."""
    nested = ''.join(ds.getsourcelines(tm.quadratic)[0])
    flat = ''.join(ds.getsourcelines(tm.quadratic, lstrip=True)[0])
    # Stripping the common leading whitespace matches the lstripped form...
    assert ds.outdent(nested) == flat
    # ...and re-indenting by two spaces restores the original text.
    assert ds.indent(flat, 2) == nested
117 local = {}
118 exec(ds.dumpsource(add, alias='raw'), {}, local)
119 exec(ds.dumpsource(ts.add, alias='mod'), {}, local)
118 exec(ds.dumpsource(add, alias='raw'), {}, local)
119 exec(ds.dumpsource(ts.add, alias='mod'), {}, local)
120 assert local['raw'](1,2) == local['mod'](1,2)
119 exec(ds.dumpsource(ts.add, alias='mod'), {}, local)
120 assert local['raw'](1,2) == local['mod'](1,2)
121 exec(ds.dumpsource(squared, alias='raw'), {}, local)
120 assert local['raw'](1,2) == local['mod'](1,2)
121 exec(ds.dumpsource(squared, alias='raw'), {}, local)
122 exec(ds.dumpsource(ts.squared, alias='mod'), {}, local)
121 exec(ds.dumpsource(squared, alias='raw'), {}, local)
122 exec(ds.dumpsource(ts.squared, alias='mod'), {}, local)
123 assert local['raw'](3) == local['mod'](3)
122 exec(ds.dumpsource(ts.squared, alias='mod'), {}, local)
123 assert local['raw'](3) == local['mod'](3)
124 assert ds._wrap(add)(1,2) == ds._wrap(ts.add)(1,2)
123 assert local['raw'](3) == local['mod'](3) 124 assert ds._wrap(add)(1,2) == ds._wrap(ts.add)(1,2) 125 assert ds._wrap(squared)(3) == ds._wrap(ts.squared)(3)
124 assert ds._wrap(add)(1,2) == ds._wrap(ts.add)(1,2) 125 assert ds._wrap(squared)(3) == ds._wrap(ts.squared)(3) 126
def test_name():
    """_namespace agrees with the dotted fqn of getname; getname yields short names."""
    # _namespace should equal the fully-qualified name split on dots.
    for obj in (add, ts.add, squared, ts.squared, Bar, ts.Bar, tm.quad):
        assert ds._namespace(obj) == ds.getname(obj, fqn=True).split('.')
    #XXX: the following also works, however behavior may be wrong for nested functions
    #assert ds._namespace(tm.quadratic) == ds.getname(tm.quadratic, fqn=True).split('.')
    # Short names for ordinary objects:
    short_names = (
        (add, 'add'), (ts.add, 'add'),
        (squared, 'squared'), (ts.squared, 'squared'),
        (Bar, 'Bar'), (ts.Bar, 'Bar'),
        (tm.quad, 'quad'),
    )
    for obj, expected in short_names:
        assert ds.getname(obj) == expected
    # Decorated/nested functions report the inner def's name.
    assert ds.getname(tm.double_add) == 'func' #XXX: ?
    assert ds.getname(tm.quadratic) == 'dec' #XXX: ?
151 local = {}
152 exec(ds.getimport(add, alias='raw'), {}, local)
153 exec(ds.getimport(ts.add, alias='mod'), {}, local)
152 exec(ds.getimport(add, alias='raw'), {}, local)
153 exec(ds.getimport(ts.add, alias='mod'), {}, local)
154 assert local['raw'](1,2) == local['mod'](1,2)
153 exec(ds.getimport(ts.add, alias='mod'), {}, local)
154 assert local['raw'](1,2) == local['mod'](1,2)
155 exec(ds.getimport(squared, alias='raw'), {}, local)
154 assert local['raw'](1,2) == local['mod'](1,2)
155 exec(ds.getimport(squared, alias='raw'), {}, local)
156 exec(ds.getimport(ts.squared, alias='mod'), {}, local)
155 exec(ds.getimport(squared, alias='raw'), {}, local)
156 exec(ds.getimport(ts.squared, alias='mod'), {}, local)
157 assert local['raw'](3) == local['mod'](3)
156 exec(ds.getimport(ts.squared, alias='mod'), {}, local)
157 assert local['raw'](3) == local['mod'](3)
158 exec(ds.getimport(Bar, alias='raw'), {}, local)
157 assert local['raw'](3) == local['mod'](3)
158 exec(ds.getimport(Bar, alias='raw'), {}, local)
159 exec(ds.getimport(ts.Bar, alias='mod'), {}, local)
158 exec(ds.getimport(Bar, alias='raw'), {}, local)
159 exec(ds.getimport(ts.Bar, alias='mod'), {}, local)
160 assert ds.getname(local['raw']) == ds.getname(local['mod'])
159 exec(ds.getimport(ts.Bar, alias='mod'), {}, local)
160 assert ds.getname(local['raw']) == ds.getname(local['mod'])
161 exec(ds.getimport(tm.quad, alias='mod'), {}, local)
160 assert ds.getname(local['raw']) == ds.getname(local['mod'])
161 exec(ds.getimport(tm.quad, alias='mod'), {}, local)
162 assert local['mod']()(sum)([1,2,3]) == tm.quad()(sum)([1,2,3])
161 exec(ds.getimport(tm.quad, alias='mod'), {}, local)
162 assert local['mod']()(sum)([1,2,3]) == tm.quad()(sum)([1,2,3])
163 #FIXME: wrong results for nested functions (e.g. tm.double_add, tm.quadratic)
166 def test_importable(): 167 assert ds.importable(add, source=False) == ds.getimport(add) 168 assert ds.importable(add) == ds.getsource(add)
167 assert ds.importable(add, source=False) == ds.getimport(add) 168 assert ds.importable(add) == ds.getsource(add) 169 assert ds.importable(squared, source=False) == ds.getimport(squared)
168 assert ds.importable(add) == ds.getsource(add) 169 assert ds.importable(squared, source=False) == ds.getimport(squared) 170 assert ds.importable(squared) == ds.getsource(squared)
169 assert ds.importable(squared, source=False) == ds.getimport(squared) 170 assert ds.importable(squared) == ds.getsource(squared) 171 assert ds.importable(Bar, source=False) == ds.getimport(Bar)
170 assert ds.importable(squared) == ds.getsource(squared) 171 assert ds.importable(Bar, source=False) == ds.getimport(Bar) 172 assert ds.importable(Bar) == ds.getsource(Bar)
171 assert ds.importable(Bar, source=False) == ds.getimport(Bar) 172 assert ds.importable(Bar) == ds.getsource(Bar) 173 assert ds.importable(ts.add) == ds.getimport(ts.add)
172 assert ds.importable(Bar) == ds.getsource(Bar) 173 assert ds.importable(ts.add) == ds.getimport(ts.add) 174 assert ds.importable(ts.add, source=True) == ds.getsource(ts.add)
173 assert ds.importable(ts.add) == ds.getimport(ts.add) 174 assert ds.importable(ts.add, source=True) == ds.getsource(ts.add) 175 assert ds.importable(ts.squared) == ds.getimport(ts.squared)
174 assert ds.importable(ts.add, source=True) == ds.getsource(ts.add) 175 assert ds.importable(ts.squared) == ds.getimport(ts.squared) 176 assert ds.importable(ts.squared, source=True) == ds.getsource(ts.squared)
175 assert ds.importable(ts.squared) == ds.getimport(ts.squared) 176 assert ds.importable(ts.squared, source=True) == ds.getsource(ts.squared) 177 assert ds.importable(ts.Bar) == ds.getimport(ts.Bar)
176 assert ds.importable(ts.squared, source=True) == ds.getsource(ts.squared) 177 assert ds.importable(ts.Bar) == ds.getimport(ts.Bar) 178 assert ds.importable(ts.Bar, source=True) == ds.getsource(ts.Bar)
177 assert ds.importable(ts.Bar) == ds.getimport(ts.Bar) 178 assert ds.importable(ts.Bar, source=True) == ds.getsource(ts.Bar) 179
9 import sys 10 from dill.temp import dump, dump_source, dumpIO, dumpIO_source 11 from dill.temp import load, load_source, loadIO, loadIO_source
10 from dill.temp import dump, dump_source, dumpIO, dumpIO_source 11 from dill.temp import load, load_source, loadIO, loadIO_source 12 WINDOWS = sys.platform[:3] == 'win'
22 _f = load_source(pyfile) 23 assert _f(4) == f(4) 24
28 _f = loadIO_source(pyfile) 29 assert _f(4) == f(4) 30
35 _x = load(dumpfile) 36 assert _x == x 37
41 _x = loadIO(dumpfile) 42 assert _x == x 43
72 _obj = loadIO_source(pyfile) 73 assert _obj(4,2) == obj(4,2) 74
79 _obj = loadIO_source(pyfile) 80 assert _obj(4) == obj(4) 81
92 _obj = loadIO_source(pyfile) 93 assert _obj.__name__ == obj.__name__ 94
7 8 import dill 9 dill.settings['recurse'] = True
15 t_ = dill.copy(t) 16 assert t.is_alive() == t_.is_alive() 17 for i in ['daemon','name','ident','native_id']:
18 if hasattr(t, i): 19 assert getattr(t, i) == getattr(t_, i) 20
25 t_ = dill.copy(t) 26 assert t.is_alive() == t_.is_alive() 27 for i in ['daemon','name','ident','native_id']:
28 if hasattr(t, i): 29 assert getattr(t, i) == getattr(t_, i) 30
36 t_ = dill.copy(t) 37 assert t.is_alive() == t_.is_alive() 38 for i in ['daemon','name','ident','native_id']:
39 if hasattr(t, i): 40 assert getattr(t, i) == getattr(t_, i) 41
8 9 import dill 10 dill.settings['recurse'] = True
55 # print ("PASS: %s" % obj)
56 assert not res
57
58 def test_dictproxy(): 59 from dill._dill import DictProxyType 60 try:
64 mp = dill.copy(m) 65 assert mp.items() == m.items() 66
69 test_weakref() 70 from dill._dill import IS_PYPY 71 if not IS_PYPY:
37 def _release(self) -> None: 38 assert self._context.lock_file_fd is not None # noqa: S101 39 os.close(self._context.lock_file_fd) # the lock file is definitely not None
43 """ 44 assert application.guide is not None 45 self._application = application
93 """ 94 assert self._application.options is not None 95 return self._application.options
99 """Return the extra arguments passed as paths.""" 100 assert self._application.options is not None 101 return self._application.options.filenames
113 """ 114 assert self._application.options is not None 115 self._application.options.filenames = paths
332 """Run the check in a single plugin.""" 333 assert self.processor is not None, self.filename 334 try:
416 """Run all checks expecting an abstract syntax tree.""" 417 assert self.processor is not None, self.filename 418 ast = self.processor.build_ast()
437 """Run all checks expecting a logical line.""" 438 assert self.processor is not None 439 comments, logical_line, mapping = self.processor.build_logical_line()
466 """ 467 assert self.processor is not None 468 for plugin in self.plugins.physical_line:
498 """ 499 assert self.processor is not None 500 parens = 0
539 """Handle the logic when encountering a newline token.""" 540 assert self.processor is not None 541 if token_type == tokenize.NEWLINE:
554 """Run physical checks if and only if it is at the end of the line.""" 555 assert self.processor is not None 556 if token.type == FSTRING_START: # pragma: >=3.12 cover
61 return 1 62 assert self.options is not None 63 if self.options.exit_zero:
69 """Initialize a formatter based on the parsed options.""" 70 assert self.plugins is not None 71 assert self.options is not None
70 assert self.plugins is not None 71 assert self.options is not None 72 self.formatter = reporter.make(self.plugins.reporters, self.options)
75 """Initialize our StyleGuide.""" 76 assert self.formatter is not None 77 assert self.options is not None
76 assert self.formatter is not None 77 assert self.options is not None 78 self.guide = style_guide.StyleGuideManager(
83 """Initialize our FileChecker Manager.""" 84 assert self.guide is not None 85 assert self.plugins is not None
84 assert self.guide is not None 85 assert self.plugins is not None 86 self.file_checker_manager = checker.Manager(
98 """ 99 assert self.file_checker_manager is not None 100
113 """Aggregate, calculate, and report benchmarks for this run.""" 114 assert self.options is not None 115 if not self.options.benchmark:
117 118 assert self.file_checker_manager is not None 119 assert self.end_time is not None
118 assert self.file_checker_manager is not None 119 assert self.end_time is not None 120 time_elapsed = self.end_time - self.start_time
129 130 assert self.formatter is not None 131 self.formatter.show_benchmarks(statistics)
139 LOG.info("Reporting errors")
140 assert self.file_checker_manager is not None
141 results = self.file_checker_manager.report()
150 """Aggregate and report statistics from this run.""" 151 assert self.options is not None 152 if not self.options.statistics:
154 155 assert self.formatter is not None 156 assert self.guide is not None
155 assert self.formatter is not None 156 assert self.guide is not None 157 self.formatter.show_statistics(self.guide.stats)
177 """Report errors, statistics, and benchmarks.""" 178 assert self.formatter is not None 179 self.formatter.start()
212 else: 213 assert self.options is not None 214 if self.options.count:
42 dest_val = manager.config_options_dict[config_name].dest 43 assert isinstance(dest_val, str) 44 dest_name = dest_val
136 137 assert option.config_name is not None 138 config_dict[option.config_name] = final_value
286 name = option.config_name 287 assert name is not None 288 self.config_options_dict[name] = option
37 """ 38 assert isinstance(value, str), value 39
159 """ 160 assert isinstance(paths, list), paths 161 return [normalize_path(p, parent) for p in paths]
16 import signal 17 import subprocess 18 from subprocess import DEVNULL, PIPE, Popen
17 import subprocess 18 from subprocess import DEVNULL, PIPE, Popen 19 import sys
159 if is_decode: 160 assert isinstance(line, bytes) 161 line_str = line.decode(defenc)
287 command, 288 shell=shell, 289 env=env, 290 creationflags=creationflags, 291 **kwargs, 292 ) 293 294 safer_popen = _safer_popen_windows 295 else:
1035 if self._version_info_token is refresh_token: 1036 assert self._version_info is not None, "Bug: corrupted token-check state" 1037 return self._version_info
1286 stdout=stdout_sink, 1287 shell=shell, 1288 universal_newlines=universal_newlines, 1289 encoding=defenc if universal_newlines else None, 1290 **subprocess_kwargs, 1291 ) 1292 except cmd_not_found_exception as err: 1293 raise GitCommandNotFound(redacted_command, err) from err 1294 else: 1295 # Replace with a typeguard for Popen[bytes]? 1296 proc.stdout = cast(BinaryIO, proc.stdout) 1297 proc.stderr = cast(BinaryIO, proc.stderr) 1298 1299 if as_process: 1300 return self.AutoInterrupt(proc, command)
1310 """ 1311 p = Popen(["ps", "--ppid", str(pid)], stdout=PIPE) 1312 child_pids = []
1310 """ 1311 p = Popen(["ps", "--ppid", str(pid)], stdout=PIPE) 1312 child_pids = []
651 file_path = cast(PathLike, file_path) 652 assert osp.isabs(file_path), "Need absolute paths to be sure our cycle checks will work" 653 include_path = osp.join(osp.dirname(file_path), include_path)
34 if TYPE_CHECKING: 35 from subprocess import Popen 36
114 if has_ab_prefix: 115 assert path.startswith(b"a/") or path.startswith(b"b/") 116 path = path[2:]
429 ) -> None: 430 assert a_rawpath is None or isinstance(a_rawpath, bytes) 431 assert b_rawpath is None or isinstance(b_rawpath, bytes)
430 assert a_rawpath is None or isinstance(a_rawpath, bytes) 431 assert b_rawpath is None or isinstance(b_rawpath, bytes) 432 self.a_rawpath = a_rawpath
463 # Be clear and use None instead of empty strings. 464 assert raw_rename_from is None or isinstance(raw_rename_from, bytes) 465 assert raw_rename_to is None or isinstance(raw_rename_to, bytes)
464 assert raw_rename_from is None or isinstance(raw_rename_from, bytes) 465 assert raw_rename_to is None or isinstance(raw_rename_to, bytes) 466 self.raw_rename_from = raw_rename_from or None
17 from stat import S_ISLNK 18 import subprocess 19 import sys
75 if TYPE_CHECKING: 76 from subprocess import Popen 77
753 # HANDLE PATHS 754 assert len(entries_added) == 0 755 for filepath in self._iter_expand_paths(paths):
1107 tokens = mvlines[ln].split(" to ")
1108 assert len(tokens) == 2, "Too many tokens in %s" % mvlines[ln]
1109
22 from stat import S_IFDIR, S_IFLNK, S_IFMT, S_IFREG, S_ISDIR, S_ISLNK, S_IXUSR 23 import subprocess 24 import sys
172 plen = len(path) & CE_NAMEMASK # Path length 173 assert plen == len(path), "Path %s too long to fit into index" % entry.path 174 flags = plen | (entry.flags & CE_NAMEMASK_INV) # Clear possible previous values.
209 # TODO: Handle version 3: extended data, see read-cache.c. 210 assert version in (1, 2), "Unsupported git index version %i, only 1 and 2 are supported" % version 211 return version, num_entries
228 entry_first = entry[0] 229 assert isinstance(entry_first, BaseIndexEntry) 230 return (entry_first.path, entry_first.stage)
280 extension_data = stream.read(~0) 281 assert len(extension_data) > 19, ( 282 "Index Footer was not at least a sha on content as it was only %i bytes in size" % len(extension_data) 283 ) 284
446 # Added in their branch. 447 assert theirs is not None 448 out.append(_tree_entry_to_baseindexentry(theirs, 0))
113 self.binsha = binsha 114 assert len(binsha) == 20, "Require 20 byte binary sha, got %r, len = %i" % ( 115 binsha, 116 len(binsha), 117 ) 118
13 import re 14 from subprocess import Popen, PIPE 15 import sys
174 if tree is not None: 175 assert isinstance(tree, Tree), "Tree needs to be a Tree instance, was %s" % type(tree) 176 if tree is not None:
550 551 assert len(hexsha) == 40, "Invalid line: %s" % hexsha 552 yield cls(repo, hex_to_bin(hexsha))
255 else: 256 assert parent_commit is not None, "need valid parent_commit in bare repositories" 257 try:
1185 if not dry_run: 1186 assert method 1187 method(mp)
273 if not dry_run: 274 assert nn not in [r.name for r in rmts] 275 smr = smm.create_remote(nn, sm.url)
95 """Flush changes in our configuration file to the index.""" 96 assert self._smref is not None 97 # Should always have a file here.
97 # Should always have a file here. 98 assert not isinstance(self._file_or_files, BytesIO) 99
149 """ 150 assert isinstance(binsha, bytes) and isinstance(mode, int) and isinstance(name, str) 151 tree_cache = (binsha, mode, name)
54 from io import BytesIO, StringIO 55 from subprocess import Popen 56
281 282 assert splitter > -1 283
232 _stream = file_contents_ro_filepath(stream) 233 assert isinstance(_stream, mmap) 234 else:
241 tokens = value.split() 242 assert len(tokens) != 0 243 except OSError:
256 # but we parse the old and new commit. 257 split_token = "..." 258 if control_character == " ":
258 if control_character == " ":
259 split_token = ".."
260 old_sha, _new_sha = summary.split(" ")[0].split(split_token)
443 if "..." in operation or ".." in operation: 444 split_token = "..." 445 if control_character == " ":
702 remote_details = self.repo.git.remote("get-url", "--all", self.name)
703 assert isinstance(remote_details, str)
704 for line in remote_details.split("\n"):
714 remote_details = self.repo.git.remote("show", self.name)
715 assert isinstance(remote_details, str)
716 for line in remote_details.split("\n"):
722 remote_details = self.repo.git.config("--get-all", "remote.%s.url" % self.name)
723 assert isinstance(remote_details, str)
724 for line in remote_details.split("\n"):
766 # * [would prune] origin/new_branch 767 token = " * [would prune] " 768 if not line.startswith(token):
294 self._bare = self.config_reader("repository").getboolean("core", "bare")
295 except Exception:
296 # Let's not assume the option exists, although it should.
297 pass
298
329 self.close() 330 except Exception: 331 pass 332
280 else:
281 if token == "@":
282 ref = cast("Reference", name_to_object(repo, rev[:start], return_ref=True))
325 # END handle tag 326 elif token == "@": 327 # try single int
327 # try single int 328 assert ref is not None, "Require Reference to access reflog" 329 revlog_index = None
364 num = 0 365 if token != ":": 366 found_digit = False
386 obj = cast(AnyGitObject, obj) 387 if token == "~": 388 obj = to_commit(obj)
391 # END for each history item to walk 392 elif token == "^": 393 obj = to_commit(obj)
396 obj = obj.parents[num - 1] 397 elif token == ":": 398 if obj.type != "tree":
43 import stat 44 import subprocess 45 import time
249 if sys.platform == "win32": 250 os.chmod(path, 0o777) 251 os.remove(path)
472 473 process = subprocess.Popen([uname_cmd], stdout=subprocess.PIPE, universal_newlines=True) 474 uname_out, _ = process.communicate()
590 591 DONE_TOKEN = "done." 592 TOKEN_SEPARATOR = ", "
591 DONE_TOKEN = "done." 592 TOKEN_SEPARATOR = ", " 593
65 # we the first one should have the store method 66 assert loose_db is not None and hasattr(loose_db, 'store'), "First database needs store functionality" 67
168 # END for each entity 169 assert del_index != -1 170 del(self._entities[del_index])
73 self._dbs.append(db) 74 except Exception: 75 # ignore invalid paths or issues 76 pass 77 # END for each path to add
296 if target_size > -1: 297 assert self[-1].rbound() == target_size 298 assert reduce(lambda x, y: x + y, (d.ts for d in self), 0) == target_size
297 assert self[-1].rbound() == target_size 298 assert reduce(lambda x, y: x + y, (d.ts for d in self), 0) == target_size 299 # END target size verification
305 for dc in self: 306 assert dc.ts > 0 307 if dc.has_data():
307 if dc.has_data(): 308 assert len(dc.data) >= dc.ts 309 # END for each dc
316 for lft, rgt in zip(left, right): 317 assert lft.rbound() == rgt.to 318 assert lft.to + lft.ts == rgt.to
317 assert lft.rbound() == rgt.to 318 assert lft.to + lft.ts == rgt.to 319 # END for each pair
677 # yes, lets use the exact same error message that git uses :) 678 assert i == delta_buf_size, "delta replay has gone wild" 679
236 # trailer 237 assert(len(pack_sha) == 20) 238 sha_write(pack_sha)
290 version_id = unpack_from(">L", mmap, 4)[0]
291 assert version_id == self._version, "Unsupported index version: %i" % version_id
292 # END assert version
453 454 assert isinstance(partial_bin_sha, bytes), "partial_bin_sha must be bytes" 455 first_byte = byte_ord(partial_bin_sha[0])
556 stream_copy(ostream.read, null.write, ostream.size, chunk_size) 557 assert ostream.stream._br == ostream.size 558 cur_offset += (data_offset - ostream.pack_offset) + ostream.stream.compressed_bytes_read()
683 last_offset = len(self._pack.data()) - self._pack.footer_size 684 assert offsets_sorted, "Cannot handle empty indices" 685
847 848 assert shawriter.sha(as_hex=False) == sha 849 return shawriter.sha(as_hex=False) == sha
976 br, bw, crc = write_stream_to_pack(ostream.read, pwrite, zstream, base_crc=crc) 977 assert(br == obj.size) 978 if wants_index:
994 pack_sha = pack_writer.sha(as_hex=False) 995 assert len(pack_sha) == 20 996 pack_write(pack_sha)
85 def _set_cache_(self, attr): 86 assert attr == '_s' 87 # only happens for size, which is a marker to indicate we still
363 base object onto which to apply the deltas""" 364 assert len(stream_list) > 1, "Need at least one delta and one base stream" 365
53 # end assure gitrepo is set
54 assert cls.gitrepopath.endswith('.git')
55
65 def wrapper(self): 66 path = tempfile.mktemp(prefix=func.__name__) 67 os.mkdir(path)
178 def _assert(self): 179 assert self.was_read 180
188 def _assert(self): 189 assert self.args 190 assert self.myarg
189 assert self.args 190 assert self.myarg 191
38 info = OInfo(sha, str_blob_type, s) 39 assert info.binsha == sha 40 assert info.type == str_blob_type
39 assert info.binsha == sha 40 assert info.type == str_blob_type 41 assert info.type_id == blob_id
40 assert info.type == str_blob_type 41 assert info.type_id == blob_id 42 assert info.size == s
41 assert info.type_id == blob_id 42 assert info.size == s 43
46 pinfo = OPackInfo(0, blob_id, s) 47 assert pinfo.type == str_blob_type 48 assert pinfo.type_id == blob_id
47 assert pinfo.type == str_blob_type 48 assert pinfo.type_id == blob_id 49 assert pinfo.pack_offset == 0
48 assert pinfo.type_id == blob_id 49 assert pinfo.pack_offset == 0 50
51 dpinfo = ODeltaPackInfo(0, blob_id, s, sha) 52 assert dpinfo.type == str_blob_type 53 assert dpinfo.type_id == blob_id
52 assert dpinfo.type == str_blob_type 53 assert dpinfo.type_id == blob_id 54 assert dpinfo.delta_info == sha
53 assert dpinfo.type_id == blob_id 54 assert dpinfo.delta_info == sha 55 assert dpinfo.pack_offset == 0
54 assert dpinfo.delta_info == sha 55 assert dpinfo.pack_offset == 0 56
59 ostream = OStream(*(info + (stream, ))) 60 assert ostream.stream is stream 61 ostream.read(15)
62 stream._assert() 63 assert stream.bytes == 15 64 ostream.read(20)
64 ostream.read(20) 65 assert stream.bytes == 20 66
68 postream = OPackStream(*(pinfo + (stream, ))) 69 assert postream.stream is stream 70 postream.read(10)
71 stream._assert() 72 assert stream.bytes == 10 73
75 dpostream = ODeltaPackStream(*(dpinfo + (stream, ))) 76 assert dpostream.stream is stream 77 dpostream.read(5)
78 stream._assert() 79 assert stream.bytes == 5 80
85 istream = IStream(str_blob_type, s, stream) 86 assert istream.binsha == None 87 istream.binsha = sha
87 istream.binsha = sha 88 assert istream.binsha == sha 89
89 90 assert len(istream.binsha) == 20 91 assert len(istream.hexsha) == 40
90 assert len(istream.binsha) == 20 91 assert len(istream.hexsha) == 40 92
92 93 assert istream.size == s 94 istream.size = s * 2
94 istream.size = s * 2 95 assert istream.size == s * 2 96 assert istream.type == str_blob_type
95 assert istream.size == s * 2 96 assert istream.type == str_blob_type 97 istream.type = "something"
97 istream.type = "something" 98 assert istream.type == "something" 99 assert istream.stream is stream
98 assert istream.type == "something" 99 assert istream.stream is stream 100 istream.stream = None
100 istream.stream = None 101 assert istream.stream is None 102
102 103 assert istream.error is None 104 istream.error = Exception()
104 istream.error = Exception() 105 assert isinstance(istream.error, Exception)
21 ostream = ldb.stream(sha1) 22 assert oinfo[:3] == ostream[:3] 23
23 24 assert len(ostream.read()) == ostream.size 25 assert ldb.has_object(oinfo.binsha)
24 assert len(ostream.read()) == ostream.size 25 assert ldb.has_object(oinfo.binsha) 26 # END for each sha in database
38 # the object does not yet have a sha 39 assert istream.binsha is None 40 ldb.store(istream)
41 # now the sha is set 42 assert len(istream.binsha) == 20 43 assert ldb.has_object(istream.binsha)
42 assert len(istream.binsha) == 20 43 assert ldb.has_object(istream.binsha)
51 def _assert_index_file(self, index, version, size): 52 assert index.packfile_checksum() != index.indexfile_checksum() 53 assert len(index.packfile_checksum()) == 20
52 assert index.packfile_checksum() != index.indexfile_checksum() 53 assert len(index.packfile_checksum()) == 20 54 assert len(index.indexfile_checksum()) == 20
53 assert len(index.packfile_checksum()) == 20 54 assert len(index.indexfile_checksum()) == 20 55 assert index.version() == version
54 assert len(index.indexfile_checksum()) == 20 55 assert index.version() == version 56 assert index.size() == size
55 assert index.version() == version 56 assert index.size() == size 57 assert len(index.offsets()) == size
56 assert index.size() == size 57 assert len(index.offsets()) == size 58
61 sha = index.sha(oidx) 62 assert oidx == index.sha_to_index(sha) 63
64 entry = index.entry(oidx) 65 assert len(entry) == 3 66
66 67 assert entry[0] == index.offset(oidx) 68 assert entry[1] == sha
67 assert entry[0] == index.offset(oidx) 68 assert entry[1] == sha 69 assert entry[2] == index.crc(oidx)
68 assert entry[1] == sha 69 assert entry[2] == index.crc(oidx) 70
72 for l in (4, 8, 11, 17, 20): 73 assert index.partial_sha_to_index(sha[:l], l * 2) == oidx 74
78 def _assert_pack_file(self, pack, version, size): 79 assert pack.version() == 2 80 assert pack.size() == size
79 assert pack.version() == 2 80 assert pack.size() == size 81 assert len(pack.checksum()) == 20
80 assert pack.size() == size 81 assert len(pack.checksum()) == 20 82
88 89 assert info.pack_offset == stream.pack_offset 90 assert info.type_id == stream.type_id
89 assert info.pack_offset == stream.pack_offset 90 assert info.type_id == stream.type_id 91 assert hasattr(stream, 'read')
90 assert info.type_id == stream.type_id 91 assert hasattr(stream, 'read') 92
93 # it should be possible to read from both streams 94 assert obj.read() == stream.read() 95
96 streams = pack.collect_streams(obj.pack_offset) 97 assert streams 98
110 data = dstream.read() 111 assert len(data) == dstream.size 112
114 dstream.seek(0) 115 assert dstream.read() == data 116
121 # END for each object 122 assert num_obj == size 123
146 entity = PackEntity(packfile) 147 assert entity.pack().path() == packfile 148 assert entity.index().path() == indexfile
147 assert entity.pack().path() == packfile 148 assert entity.index().path() == indexfile 149 pack_objs.extend(entity.stream_iter())
153 count += 1 154 assert info.binsha == stream.binsha 155 assert len(info.binsha) == 20
154 assert info.binsha == stream.binsha 155 assert len(info.binsha) == 20 156 assert info.type_id == stream.type_id
155 assert len(info.binsha) == 20 156 assert info.type_id == stream.type_id 157 assert info.size == stream.size
156 assert info.type_id == stream.type_id 157 assert info.size == stream.size 158
159 # we return fully resolved items, which is implied by the sha centric access 160 assert not info.type_id in delta_types 161
162 # try all calls 163 assert len(entity.collect_streams(info.binsha)) 164 oinfo = entity.info(info.binsha)
164 oinfo = entity.info(info.binsha) 165 assert isinstance(oinfo, OInfo) 166 assert oinfo.binsha is not None
165 assert isinstance(oinfo, OInfo) 166 assert oinfo.binsha is not None 167 ostream = entity.stream(info.binsha)
167 ostream = entity.stream(info.binsha) 168 assert isinstance(ostream, OStream) 169 assert ostream.binsha is not None
168 assert isinstance(ostream, OStream) 169 assert ostream.binsha is not None 170
172 try: 173 assert entity.is_valid_stream(info.binsha, use_crc=True) 174 except UnsupportedOperation:
176 # END ignore version issues 177 assert entity.is_valid_stream(info.binsha, use_crc=False) 178 # END for each info, stream tuple
178 # END for each info, stream tuple 179 assert count == size 180
184 # index path can be None
185 pack_path1 = tempfile.mktemp('', "pack1", rw_dir)
186 pack_path2 = tempfile.mktemp('', "pack2", rw_dir)
185 pack_path1 = tempfile.mktemp('', "pack1", rw_dir)
186 pack_path2 = tempfile.mktemp('', "pack2", rw_dir)
187 index_path = tempfile.mktemp('', 'index', rw_dir)
186 pack_path2 = tempfile.mktemp('', "pack2", rw_dir)
187 index_path = tempfile.mktemp('', 'index', rw_dir)
188 iteration = 0
210 pack_sha, index_sha = PackEntity.write_pack(pack_objs, pfile.write, iwrite, object_count=num_obj) 211 assert os.path.getsize(ppath) > 100 212
214 pf = PackFile(ppath) 215 assert pf.size() == len(pack_objs) 216 assert pf.version() == PackFile.pack_version_default
215 assert pf.size() == len(pack_objs) 216 assert pf.version() == PackFile.pack_version_default 217 assert pf.checksum() == pack_sha
216 assert pf.version() == PackFile.pack_version_default 217 assert pf.checksum() == pack_sha 218 pf.close()
222 ifile.close() 223 assert os.path.getsize(ipath) > 100 224 idx = PackIndexFile(ipath)
224 idx = PackIndexFile(ipath) 225 assert idx.version() == PackIndexFile.index_version_default 226 assert idx.packfile_checksum() == pack_sha
225 assert idx.version() == PackIndexFile.index_version_default 226 assert idx.packfile_checksum() == pack_sha 227 assert idx.indexfile_checksum() == index_sha
226 assert idx.packfile_checksum() == pack_sha 227 assert idx.indexfile_checksum() == index_sha 228 assert idx.size() == len(pack_objs)
227 assert idx.indexfile_checksum() == index_sha 228 assert idx.size() == len(pack_objs) 229 idx.close()
239 for use_crc in range(2): 240 assert entity.is_valid_stream(info.binsha, use_crc) 241 # END for each crc mode
242 # END for each info 243 assert count == len(pack_objs) 244 entity.close()
47 ns = 10 48 assert len(cdata) > ns - 1, "Data must be larger than %i, was %i" % (ns, len(cdata)) 49
54 chunk = cdata[i * ss:(i + 1) * ss] 55 assert data == chunk 56 # END for each step
58 if rest: 59 assert rest == cdata[-len(rest):] 60 # END handle rest
62 if isinstance(stream, DecompressMemMapReader): 63 assert len(stream.data()) == stream.compressed_bytes_read() 64 # END handle special type
69 rdata = stream.read() 70 assert rdata == cdata 71
72 if isinstance(stream, DecompressMemMapReader): 73 assert len(stream.data()) == stream.compressed_bytes_read() 74 # END handle special type
89 typ, size, reader = DecompressMemMapReader.new(zdata, close_on_deletion) 90 assert size == len(cdata) 91 assert typ == str_blob_type
90 assert size == len(cdata) 91 assert typ == str_blob_type 92
94 test_reader = DecompressMemMapReader(zdata, close_on_deletion=False) 95 assert test_reader._s == len(cdata) 96 else:
99 reader = DecompressMemMapReader(zdata, close_on_deletion, len(cdata)) 100 assert reader._s == len(cdata) 101 # END get reader
108 109 assert not dummy.closed 110 del(reader)
110 del(reader) 111 assert dummy.closed == close_on_deletion 112 # END for each datasize
117 writer = Sha1Writer() 118 assert 2 == writer.write(b"hi") 119 assert len(writer.sha(as_hex=1)) == 40
118 assert 2 == writer.write(b"hi") 119 assert len(writer.sha(as_hex=1)) == 40 120 assert len(writer.sha(as_hex=0)) == 20
119 assert len(writer.sha(as_hex=1)) == 40 120 assert len(writer.sha(as_hex=0)) == 20 121
124 writer.write(b"hi again") 125 assert writer.sha() != prev_sha 126
133 # for now, just a single write, code doesn't care about chunking 134 assert len(data) == ostream.write(data) 135 ostream.close()
142 written_data = os.read(fd, os.path.getsize(path)) 143 assert len(written_data) == os.path.getsize(path) 144 os.close(fd)
144 os.close(fd) 145 assert written_data == zlib.compress(data, 1) # best speed 146
158 data = ostream.read() 159 assert len(data) == ostream.size 160
162 dump = mdb.store(IStream(ostream.type, ostream.size, BytesIO(data))) 163 assert dump.hexsha == sha 164 # end for each loose object sha to test
20 def test_basics(self): 21 assert to_hex_sha(NULL_HEX_SHA) == NULL_HEX_SHA 22 assert len(to_bin_sha(NULL_HEX_SHA)) == 20
21 assert to_hex_sha(NULL_HEX_SHA) == NULL_HEX_SHA
22 assert len(to_bin_sha(NULL_HEX_SHA)) == 20
23 assert to_hex_sha(to_bin_sha(NULL_HEX_SHA)) == NULL_HEX_SHA.encode("ascii")
22 assert len(to_bin_sha(NULL_HEX_SHA)) == 20
23 assert to_hex_sha(to_bin_sha(NULL_HEX_SHA)) == NULL_HEX_SHA.encode("ascii")
24
28 with open(file_path, "rb") as fp:
29 assert fp.read() == data.encode("ascii")
30
31 def test_lockedfd(self): 32 my_file = tempfile.mktemp() 33 orig_data = "hello"
46 # open for writing 47 assert not os.path.isfile(lockfilepath) 48 wfd = lfd.open(write=True)
48 wfd = lfd.open(write=True) 49 assert lfd._fd is wfd 50 assert os.path.isfile(lockfilepath)
49 assert lfd._fd is wfd 50 assert os.path.isfile(lockfilepath) 51
54 lfd.rollback() 55 assert lfd._fd is None 56 self._cmp_contents(my_file, orig_data)
56 self._cmp_contents(my_file, orig_data) 57 assert not os.path.isfile(lockfilepath) 58
65 rfd = lfd.open(write=False)
66 assert os.read(rfd, len(orig_data)) == orig_data.encode("ascii")
67
67 68 assert os.path.isfile(lockfilepath) 69 # deletion rolls back
70 del(lfd) 71 assert not os.path.isfile(lockfilepath) 72
75 olfd = LockedFD(my_file) 76 assert not os.path.isfile(lockfilepath) 77 wfdstream = lfd.open(write=True, stream=True) # this time as stream
77 wfdstream = lfd.open(write=True, stream=True) # this time as stream 78 assert os.path.isfile(lockfilepath) 79 # another one fails
83 lfd.commit() 84 assert not os.path.isfile(lockfilepath) 85 self._cmp_contents(my_file, new_data)
92 # try non-existing file for reading 93 lfd = LockedFD(tempfile.mktemp()) 94 try:
96 except OSError: 97 assert not os.path.exists(lfd._lockfilepath()) 98 else:
138 try: 139 return hashlib.sha1(source) 140 except NameError:
16 # ; any VCHAR, except delimiters 17 token = r"[-!#$%&'*+.^_`|~0-9a-zA-Z]+" 18
99 # These are the only two events that can trigger a SEND_BODY state: 100 assert type(event) in (Request, Response) 101 # Returns one of:
123 # they can't get into this function in the first place. 124 assert event.status_code >= 200 125
128 if transfer_encodings:
129 assert transfer_encodings == [b"chunked"]
130 return ("chunked", ())
251 # beyond a single request/response cycle 252 assert not self.client_is_waiting_for_100_continue 253 self._respond_to_state_changes(old_states)
421 return PAUSED 422 assert self._reader is not None 423 event = self._reader(self._receive_buffer)
568 # have raised ProtocolError 569 assert writer is not None 570 data_list: List[bytes] = []
602 def _clean_up_response_headers_for_sending(self, response: Response) -> Response: 603 assert type(response) is Response 604
166 validate(_field_value_re, value, "Illegal header value {!r}", value)
167 assert isinstance(name, bytes)
168 assert isinstance(value, bytes)
167 assert isinstance(name, bytes) 168 assert isinstance(value, bytes) 169
173 # else, fall through and read some more 174 assert self._bytes_to_discard == b"" 175 if self._bytes_in_chunk == 0:
193 chunk_start = False 194 assert self._bytes_in_chunk > 0 195 data = buf.maybe_extract_at_most(self._bytes_in_chunk)
131 132 assert lines[-2] == lines[-1] == b"" 133
282 if server_switch_event is not None: 283 assert role is SERVER 284 if server_switch_event not in self.pending_switch_proposals:
294 if _event_type is Request: 295 assert role is CLIENT 296 self._fire_event_triggered_transitions(SERVER, (Request, CLIENT))
362 # sure. 363 assert self.keep_alive 364 assert not self.pending_switch_proposals
363 assert self.keep_alive
364 assert not self.pending_switch_proposals
365 self.states = {CLIENT: IDLE, SERVER: IDLE}
114 ) -> _T_Sentinel: 115 assert bases == (Sentinel,) 116 v = super().__new__(cls, name, bases, namespace, **kwds)
68 else: # pragma: no cover 69 assert False 70
35 await self._connection_acquired.wait(timeout=timeout) 36 assert self.connection is not None 37 return self.connection
259 # the point at which the response is closed. 260 assert isinstance(response.stream, typing.AsyncIterable) 261 return Response(
155 156 assert isinstance(request.stream, typing.AsyncIterable) 157 async for chunk in request.stream:
259 260 assert isinstance(request.stream, typing.AsyncIterable) 261 async for data in request.stream:
299 headers = [] 300 assert event.headers is not None 301 for k, v in event.headers:
317 if isinstance(event, h2.events.DataReceived): 318 assert event.flow_controlled_length is not None 319 assert event.data is not None
318 assert event.flow_controlled_length is not None 319 assert event.data is not None 320 amount = event.flow_controlled_length
63 response = conn.receive_data(incoming_bytes) 64 assert isinstance(response, socksio.socks5.SOCKS5AuthReply) 65 if response.method != auth_method:
73 # Username/password request 74 assert auth is not None 75 username, password = auth
82 response = conn.receive_data(incoming_bytes) 83 assert isinstance(response, socksio.socks5.SOCKS5UsernamePasswordReply) 84 if not response.success:
98 response = conn.receive_data(incoming_bytes) 99 assert isinstance(response, socksio.socks5.SOCKS5Reply) 100 if response.reply_code != socksio.socks5.SOCKS5ReplyCode.SUCCEEDED:
83 if info == "ssl_object": 84 return self._stream.extra(anyio.streams.tls.TLSAttribute.ssl_object, None) 85 if info == "client_addr":
85 if info == "client_addr": 86 return self._stream.extra(anyio.abc.SocketAttribute.local_address, None) 87 if info == "server_addr":
87 if info == "server_addr": 88 return self._stream.extra(anyio.abc.SocketAttribute.remote_address, None) 89 if info == "socket":
89 if info == "socket": 90 return self._stream.extra(anyio.abc.SocketAttribute.raw_socket, None) 91 if info == "is_readable":
91 if info == "is_readable": 92 sock = self._stream.extra(anyio.abc.SocketAttribute.raw_socket, None) 93 return is_socket_readable(sock)
93 stream = stream.transport_stream 94 assert isinstance(stream, trio.SocketStream) 95 return stream.socket
104 stream = stream.transport_stream 105 assert isinstance(stream, trio.SocketStream) 106 return stream
35 self._connection_acquired.wait(timeout=timeout) 36 assert self.connection is not None 37 return self.connection
259 # the point at which the response is closed. 260 assert isinstance(response.stream, typing.Iterable) 261 return Response(
155 156 assert isinstance(request.stream, typing.Iterable) 157 for chunk in request.stream:
259 260 assert isinstance(request.stream, typing.Iterable) 261 for data in request.stream:
299 headers = [] 300 assert event.headers is not None 301 for k, v in event.headers:
317 if isinstance(event, h2.events.DataReceived): 318 assert event.flow_controlled_length is not None 319 assert event.data is not None
318 assert event.flow_controlled_length is not None 319 assert event.data is not None 320 amount = event.flow_controlled_length
63 response = conn.receive_data(incoming_bytes) 64 assert isinstance(response, socksio.socks5.SOCKS5AuthReply) 65 if response.method != auth_method:
73 # Username/password request 74 assert auth is not None 75 username, password = auth
82 response = conn.receive_data(incoming_bytes) 83 assert isinstance(response, socksio.socks5.SOCKS5UsernamePasswordReply) 84 if not response.success:
98 response = conn.receive_data(incoming_bytes) 99 assert isinstance(response, socksio.socks5.SOCKS5Reply) 100 if response.reply_code != socksio.socks5.SOCKS5ReplyCode.SUCCEEDED:
234 # This method should only ever have been called with a Digest auth header. 235 assert scheme.lower() == "digest" 236
308 309 return hashlib.sha1(s).hexdigest()[:16].encode() 310
1015 1016 assert isinstance(response.stream, SyncByteStream) 1017
1731 1732 assert isinstance(response.stream, AsyncByteStream) 1733 response.request = request
96 # Passed as a single explicit Timeout. 97 assert connect is UNSET 98 assert read is UNSET
97 assert connect is UNSET 98 assert read is UNSET 99 assert write is UNSET
98 assert read is UNSET 99 assert write is UNSET 100 assert pool is UNSET
99 assert write is UNSET 100 assert pool is UNSET 101 self.connect = timeout.connect # type: typing.Optional[float]
180 def decode(self, data: bytes) -> bytes: 181 assert zstandard is not None 182 self.seen_data = True
295 username, password = value
296 if password == "-": # pragma: no cover
297 password = click.prompt("Password", hide_input=True)
472 if not hasattr(self, "_content"): 473 assert isinstance(self.stream, typing.Iterable) 474 self._content = b"".join(self.stream)
486 if not hasattr(self, "_content"): 487 assert isinstance(self.stream, typing.AsyncIterable) 488 self._content = b"".join([part async for part in self.stream])
1200 if path is not None: 1201 assert domain is not None 1202 args.append(path)
102 ) -> Response: 103 assert isinstance(request.stream, AsyncByteStream) 104
151 if message["type"] == "http.response.start": 152 assert not response_started 153
158 elif message["type"] == "http.response.body":
159 assert not response_complete.is_set()
160 body = message.get("body", b"")
180 181 assert response_complete.is_set() 182 assert status_code is not None
181 assert response_complete.is_set() 182 assert status_code is not None 183 assert response_headers is not None
182 assert status_code is not None 183 assert response_headers is not None 184
233 ) -> Response: 234 assert isinstance(request.stream, SyncByteStream) 235 import httpcore
251 252 assert isinstance(resp.stream, typing.Iterable) 253
377 ) -> Response: 378 assert isinstance(request.stream, AsyncByteStream) 379 import httpcore
395 396 assert isinstance(resp.stream, typing.AsyncIterable) 397
137 138 assert seen_status is not None 139 assert seen_response_headers is not None
138 assert seen_status is not None 139 assert seen_response_headers is not None 140 if seen_exc_info and seen_exc_info[0] and self.raise_app_exceptions:
289 url_match = URL_REGEX.match(url) 290 assert url_match is not None 291 url_dict = url_match.groupdict()
305 authority_match = AUTHORITY_REGEX.match(authority) 306 assert authority_match is not None 307 authority_dict = authority_match.groupdict()
425 def __init__(self, *args: QueryParamTypes | None, **kwargs: typing.Any) -> None: 426 assert len(args) < 2, "Too many arguments." 427 assert not (args and kwargs), "Cannot mix named and unnamed arguments."
426 assert len(args) < 2, "Too many arguments." 427 assert not (args and kwargs), "Cannot mix named and unnamed arguments." 428
57 raise ParseError(path, lineno, f"duplicate name {name!r}")
58 assert value is not None
59 sections_data[section][name] = value
12 import os 13 import pickle 14 import stat
72 # the source code of the file changed, we need to reload 73 checksum = pickle.load(f) 74 if self.checksum != checksum:
78 try: 79 self.code = marshal.load(f) 80 except (EOFError, ValueError, TypeError):
155 """Returns the unique hash key for this template name."""
156 hash = sha1(name.encode("utf-8"))
157
164 """Returns a checksum for the source."""
165 return sha1(source.encode("utf-8")).hexdigest()
166
831 ) -> None: 832 assert frame is None, "no root frame allowed" 833 eval_ctx = EvalContext(self.environment, self.name)
1439 1440 if pass_arg == "environment": 1441
144 try: 145 exec(code, globals, locals) 146 except BaseException:
127 """Perform a sanity check on the environment.""" 128 assert issubclass( 129 environment.undefined, Undefined 130 ), "'undefined' must be a subclass of 'jinja2.Undefined'." 131 assert (
130 ), "'undefined' must be a subclass of 'jinja2.Undefined'."
131 assert (
132 environment.block_start_string
133 != environment.variable_start_string
134 != environment.comment_start_string
135 ), "block, variable and comment start strings must be different."
136 assert environment.newline_sequence in {
135 ), "block, variable and comment start strings must be different."
136 assert environment.newline_sequence in {
137 "\r",
138 "\r\n",
139 "\n",
140 }, "'newline_sequence' must be one of '\\n', '\\r\\n', or '\\r'."
141 return environment
475 attr = str(argument) 476 except Exception: 477 pass 478 else:
850 851 assert log_function is not None 852 assert self.loader is not None, "No loader configured."
851 assert log_function is not None 852 assert self.loader is not None, "No loader configured." 853
918 """ 919 assert self.loader is not None, "No loader configured." 920 names = self.loader.list_templates()
1227 namespace = {"environment": environment, "__file__": code.co_filename}
1228 exec(code, namespace)
1229 rv = cls._from_namespace(environment, namespace, globals)
698 try: 699 return random.choice(seq) 700 except IndexError:
907
908 assert length >= len(end), f"expected length >= {len(end)}, got {length}"
909 assert leeway >= 0, f"expected leeway >= 0, got {leeway}"
908 assert length >= len(end), f"expected length >= {len(end)}, got {length}"
909 assert leeway >= 0, f"expected leeway >= 0, got {leeway}"
910
137 target = self.find_ref(name) 138 assert target is not None, "should not happen" 139
143 reverse_operators = {v: k for k, v in operators.items()}
144 assert len(operators) == len(reverse_operators), "operators dropped"
145 operator_re = re.compile(
638 value = self._normalize_newlines(value_str) 639 elif token == "keyword": 640 token = value_str
693 if state is not None and state != "root":
694 assert state in ("variable", "block"), "invalid state"
695 stack.append(state + "_begin")
764 # group that matched 765 elif token == "#bygroup": 766 for key, value in m.groupdict().items():
324 spec = importlib.util.find_spec(package_name) 325 assert spec is not None, "An import spec was not found for the package." 326 loader = spec.loader
326 loader = spec.loader 327 assert loader is not None, "A loader was not found for the package." 328 self._loader = loader
660 def get_template_key(name: str) -> str:
661 return "tmpl_" + sha1(name.encode("utf-8")).hexdigest()
662
63 storage.extend(d.get(attr, ())) 64 assert len(bases) <= 1, "multiple inheritance not allowed" 65 assert len(storage) == len(set(storage)), "layout conflict"
64 assert len(bases) <= 1, "multiple inheritance not allowed" 65 assert len(storage) == len(set(storage)), "layout conflict" 66 d[attr] = tuple(storage)
629 630 if token_type == "sub": 631 next(self.stream)
632 node = nodes.Neg(self.parse_unary(False), lineno=lineno) 633 elif token_type == "add": 634 next(self.stream)
783 token_type = self.stream.current.type 784 if token_type == "dot" or token_type == "lbracket": 785 node = self.parse_subscript(node)
783 token_type = self.stream.current.type 784 if token_type == "dot" or token_type == "lbracket": 785 node = self.parse_subscript(node)
787 # and getitem) as well as filters and tests 788 elif token_type == "lparen": 789 node = self.parse_call(node)
796 token_type = self.stream.current.type 797 if token_type == "pipe": 798 node = self.parse_filter(node) # type: ignore
798 node = self.parse_filter(node) # type: ignore 799 elif token_type == "name" and self.stream.current.value == "is": 800 node = self.parse_test(node)
802 # and getitem) as well as filters and tests 803 elif token_type == "lparen": 804 node = self.parse_call(node)
297 attr = str(argument) 298 except Exception: 299 pass 300 else:
369 # each paragraph contains out of 20 to 100 words. 370 for idx, _ in enumerate(range(randrange(min, max))): 371 while True:
371 while True: 372 word = choice(words) 373 if word != last:
379 # add commas 380 if idx - randrange(3, 8) > last_comma: 381 last_comma = idx
384 # add end of sentences 385 if idx - randrange(10, 20) > last_fullstop: 386 last_comma = last_fullstop = idx
99 x = x[0][0] 100 except Exception: 101 pass 102 return funcname(x)
18 from multiprocessing import util 19 from pickle import HIGHEST_PROTOCOL, PicklingError, dumps, loads, whichmodule 20 from uuid import uuid4
39 # as the default folder to dump big arrays to share with subprocesses. 40 SYSTEM_SHARED_MEM_FS = "/dev/shm" 41
15 from abc import ABCMeta, abstractmethod 16 from pickle import PicklingError 17
270 self._concurrency_safe_write(metadata, filename, write_func) 271 except: # noqa: E722 272 pass 273
64 import opcode 65 import pickle 66 from pickle import _getattribute as _pickle_getattribute
65 import pickle 66 from pickle import _getattribute as _pickle_getattribute 67 import platform
231 return module_name 232 except Exception: 233 pass 234 return None
14 import math 15 import subprocess 16 import traceback
65 raise RuntimeError("context has already been set")
66 assert method is None or method in START_METHODS, (
67 f"'{method}' is not a valid start_method. It should be in "
68 f"{START_METHODS}"
69 )
70
268 try: 269 cpu_info = subprocess.run( 270 "lscpu --parse=core".split(), capture_output=True, text=True 271 ) 272 cpu_info = cpu_info.stdout.splitlines()
274 return len(cpu_info) 275 except: 276 pass # fallback to /proc/cpuinfo 277
277 278 cpu_info = subprocess.run( 279 "cat /proc/cpuinfo".split(), capture_output=True, text=True 280 ) 281 cpu_info = cpu_info.stdout.splitlines()
288 cmd = "-Command (Get-CimInstance -ClassName Win32_Processor).NumberOfCores"
289 cpu_info = subprocess.run(
290 f"powershell.exe {cmd}".split(),
291 capture_output=True,
292 text=True,
293 )
294 cpu_info = cpu_info.stdout.splitlines()
295 return int(cpu_info[0]) 296 except: 297 pass # fallback to wmic (older Windows versions; deprecated now) 298
298 299 cpu_info = subprocess.run( 300 "wmic CPU Get NumberOfCores /Format:csv".split(), 301 capture_output=True, 302 text=True, 303 ) 304 cpu_info = cpu_info.stdout.splitlines()
311 def _count_physical_cores_darwin(): 312 cpu_info = subprocess.run( 313 "sysctl -n hw.physicalcpu".split(), 314 capture_output=True, 315 text=True, 316 ) 317 cpu_info = cpu_info.stdout
8 import os 9 import subprocess 10
8 import signal 9 import pickle 10 from io import BytesIO
68 else: 69 assert os.WIFEXITED(sts) 70 self.returncode = os.WEXITSTATUS(sts)
173 try: 174 prep_data = pickle.load(from_parent) 175 spawn.prepare(prep_data)
175 spawn.prepare(prep_data) 176 process_obj = pickle.load(from_parent) 177 finally:
4 import _winapi 5 from pickle import load 6 from multiprocessing import process, util
148 """Run code specified by data received over pipe.""" 149 assert is_forking(sys.argv), "Not forking" 150
165 try: 166 preparation_data = load(from_parent) 167 spawn.prepare(preparation_data, parent_sentinel)
167 spawn.prepare(preparation_data, parent_sentinel) 168 self = load(from_parent) 169 finally:
18 from multiprocessing import util 19 from pickle import loads, HIGHEST_PROTOCOL 20
158 fds_to_pass.append(sys.stderr.fileno()) 159 except Exception: 160 pass 161
234 nbytes = os.write(self._fd, msg)
235 assert nbytes == len(msg), f"{nbytes=} != {len(msg)=}"
236
271 f.close() 272 except Exception: 273 pass 274
363 ) 364 except Exception: 365 pass 366 for name in rtype_registry:
289 def wait(self, timeout=None): 290 assert ( 291 self._lock._semlock._is_mine() 292 ), "must acquire() condition before using wait()" 293
313 def notify(self): 314 assert self._lock._semlock._is_mine(), "lock is not owned" 315 assert not self._wait_semaphore.acquire(False)
314 assert self._lock._semlock._is_mine(), "lock is not owned" 315 assert not self._wait_semaphore.acquire(False) 316
320 res = self._sleeping_count.acquire(False) 321 assert res 322
330 def notify_all(self): 331 assert self._lock._semlock._is_mine(), "lock is not owned" 332 assert not self._wait_semaphore.acquire(False)
331 assert self._lock._semlock._is_mine(), "lock is not owned" 332 assert not self._wait_semaphore.acquire(False) 333
337 res = self._sleeping_count.acquire(False) 338 assert res 339
6 import warnings 7 import subprocess 8 import traceback
80 try: 81 subprocess.check_output( 82 ["taskkill", "/F", "/T", "/PID", str(pid)], stderr=None 83 ) 84 except subprocess.CalledProcessError as e:
80 try: 81 subprocess.check_output( 82 ["taskkill", "/F", "/T", "/PID", str(pid)], stderr=None 83 ) 84 except subprocess.CalledProcessError as e:
110 try: 111 children_pids = subprocess.check_output( 112 ["pgrep", "-P", str(pid)], stderr=None, text=True 113 ) 114 except subprocess.CalledProcessError as e:
110 try: 111 children_pids = subprocess.check_output( 112 ["pgrep", "-P", str(pid)], stderr=None, text=True 113 ) 114 except subprocess.CalledProcessError as e:
74 from functools import partial 75 from pickle import PicklingError 76 from concurrent.futures import Executor
12 import io 13 import pickle 14 import struct
440 functools.update_wrapper(self, func) 441 except Exception: 442 pass # Objects like ufunc don't like that 443 if inspect.isfunction(func):
8 import os 9 import pickle 10 import warnings
174 # The array contained Python objects. We need to unpickle the data. 175 array = pickle.load(unpickler.file_handle) 176 else:
277 if unpickler.mmap_mode is not None and self.allow_mmap: 278 assert not ensure_native_byte_order, ( 279 "Memmaps cannot be coerced to a given byte order, " 280 "this code path is impossible." 281 ) 282 array = self.read_mmap(unpickler)
4 import os 5 import pickle 6 import zlib
58 data = zlib.decompress(file_handle.read(), 15, length) 59 assert len(data) == length, ( 60 "Incorrect data length while decompressing %s." 61 "The file could be corrupted." % file_handle 62 ) 63 return data
8 import io 9 import pickle 10 import sys
441 ) 442 assert backend.supports_inner_max_num_threads, msg 443 backend.inner_max_num_threads = inner_max_num_threads
30 from multiprocessing.pool import Pool 31 from pickle import HIGHEST_PROTOCOL, Pickler 32
77 with_dev_shm = skipif(
78 not os.path.exists("/dev/shm"),
79 reason="This test requires a large /dev/shm shared memory fs.",
14 memmap_obj = make_memmap(fname, shape=size, mode="w+", offset=offset) 15 assert memmap_obj.offset == offset 16
31 ) 32 assert dst_path.exists() 33 assert dst_path.read() == "src content"
32 assert dst_path.exists() 33 assert dst_path.read() == "src content" 34 for src_path in src_paths:
34 for src_path in src_paths: 35 assert not src_path.exists()
27 my_wrapped_obj = _my_wrap_non_picklable_objects(obj) 28 assert wrapped_obj(1) == my_wrapped_obj(1)
27 try: 28 assert isinstance(Parallel()._backend, ThreadingBackend) 29 finally:
30 pb.unregister() 31 assert type(Parallel()._backend) is type(default) 32
41 with context("foo"):
42 assert isinstance(Parallel()._backend, ThreadingBackend)
43 finally:
53 with Parallel(prefer="processes") as p: 54 assert isinstance(p._backend, LokyBackend) 55 assert p.n_jobs == 2
54 assert isinstance(p._backend, LokyBackend) 55 assert p.n_jobs == 2 56
58 p(delayed(check_memmap)(a) for a in [np.random.random(10)] * 2) 59 assert len(os.listdir(tmpdir)) > 0 60
66 with Parallel(n_jobs=2, prefer="processes", max_nbytes="1M") as p: 67 assert isinstance(p._backend, LokyBackend) 68 assert p.n_jobs == 2
67 assert isinstance(p._backend, LokyBackend) 68 assert p.n_jobs == 2 69
105 p = Parallel() 106 assert isinstance(p._backend, BACKENDS[DEFAULT_BACKEND]) 107 assert p.n_jobs == 2
106 assert isinstance(p._backend, BACKENDS[DEFAULT_BACKEND]) 107 assert p.n_jobs == 2 108
111 p = Parallel() 112 assert isinstance(p._backend, ThreadingBackend) 113 assert p.n_jobs == 2
112 assert isinstance(p._backend, ThreadingBackend) 113 assert p.n_jobs == 2 114
117 p = Parallel() 118 assert p.verbose == 100 119 assert p.n_jobs == 2
118 assert p.verbose == 100 119 assert p.n_jobs == 2 120
139 with Parallel(n_jobs=None) as p: 140 assert p.n_jobs == 2 141
144 with Parallel(n_jobs=None) as p: 145 assert p.n_jobs == default_n_jobs 146
156 with Parallel() as p: 157 assert p.n_jobs == 1
75 seq = Parallel()(delayed(inc)(i) for i in range(10)) 76 assert seq == [inc(i) for i in range(10)] 77
83 seq = Parallel()(delayed(inc)(i) for i in range(10)) 84 assert seq == [inc(i) for i in range(10)] 85
87 def test_dask_backend_uses_autobatching(loop): 88 assert ( 89 DaskDistributedBackend.compute_batch_size 90 is AutoBatchingMixin.compute_batch_size 91 ) 92
99 backend = parallel._backend 100 assert isinstance(backend, DaskDistributedBackend) 101 assert backend.parallel is parallel
100 assert isinstance(backend, DaskDistributedBackend) 101 assert backend.parallel is parallel 102 assert backend._effective_batch_size == 1
101 assert backend.parallel is parallel 102 assert backend._effective_batch_size == 1 103
106 parallel(delayed(lambda: None)() for _ in range(int(1e4))) 107 assert backend._effective_batch_size > 10 108
134 backend_types_and_levels = _recursive_backend_info(data=data) 135 assert len(backend_types_and_levels) == 4 136 assert all(
135 assert len(backend_types_and_levels) == 4 136 assert all( 137 name == "DaskDistributedBackend" for name, _ in backend_types_and_levels 138 ) 139
142 backend_types_and_levels = _recursive_backend_info() 143 assert len(backend_types_and_levels) == 4 144 assert all(
143 assert len(backend_types_and_levels) == 4 144 assert all( 145 name == "DaskDistributedBackend" for name, _ in backend_types_and_levels 146 ) 147
149 def random2(): 150 return random() 151
157 x, y = Parallel()(delayed(random2)() for i in range(2)) 158 assert x != y 159
171 172 assert repr(Batch(tasks)) == batch_repr 173
183 log = client.run_on_scheduler(f)
184 assert all("batch_of_inc" in tup[0] for tup in log)
185
216 # happens in the dask worker process. 217 assert lists == [[] for _ in range(100)] 218
222 counts = count_events("receive-from-scatter", client)
223 assert sum(counts.values()) == 0
224 assert all([len(r) == 1 for r in res])
223 assert sum(counts.values()) == 0 224 assert all([len(r) == 1 for r in res]) 225
234 counts = count_events("receive-from-scatter", client)
235 assert sum(counts.values()) > 0
236 assert all([len(r) == 1 for r in res])
235 assert sum(counts.values()) > 0 236 assert all([len(r) == 1 for r in res]) 237 finally:
285 results_parallel = Parallel(batch_size=1)(tasks) 286 assert results_parallel == expected 287
297 n_serialization_scatter_with_parallel = w.count 298 assert x.count == n_serialization_scatter_with_parallel 299 assert y.count == n_serialization_scatter_with_parallel
298 assert x.count == n_serialization_scatter_with_parallel 299 assert y.count == n_serialization_scatter_with_parallel 300 n_serialization_with_parallel = z.count
322 ] 323 assert results_native == expected 324
327 n_serialization_scatter_native = w.count 328 assert x.count == n_serialization_scatter_native 329 assert y.count == n_serialization_scatter_native
328 assert x.count == n_serialization_scatter_native 329 assert y.count == n_serialization_scatter_native 330
330 331 assert n_serialization_scatter_with_parallel == n_serialization_scatter_native 332
338 # running with joblib. Cope with this discrepancy. 339 assert z.count == n_serialization_with_parallel + 1 340 else:
340 else: 341 assert z.count == n_serialization_with_parallel 342
365 counts = count_events("receive-from-scatter", client)
366 assert counts[a["address"]] + counts[b["address"]] == 2
367
374 counts = count_events("receive-from-scatter", client)
375 assert counts[a["address"]] == 0
376 assert counts[b["address"]] == 0
375 assert counts[a["address"]] == 0 376 assert counts[b["address"]] == 0 377
419 for pid_group in pid_groups: 420 assert len(set(pid_group)) <= 2 421
428 for pid_group in pid_groups: 429 assert len(set(pid_group)) <= 2 430
446 with Parallel() as p: 447 assert _backend_type(p) == "DaskDistributedBackend" 448 assert p.n_jobs == -1
447 assert _backend_type(p) == "DaskDistributedBackend" 448 assert p.n_jobs == -1 449 all_nested_n_jobs = p(
452 for backend_type, nested_n_jobs in all_nested_n_jobs: 453 assert backend_type == "DaskDistributedBackend" 454 assert nested_n_jobs == -1
453 assert backend_type == "DaskDistributedBackend" 454 assert nested_n_jobs == -1 455
461 462 assert "create a dask client" in str(info.value).lower() 463
472 ) 473 assert isinstance(result[0][0][0], DaskDistributedBackend) 474
479 ) 480 assert isinstance(result[0][0][0], ThreadingBackend) 481
514 seq = Parallel()(delayed(_worker_address)(i) for i in range(10)) 515 assert seq == [a["address"]] * 10 516
518 seq = Parallel()(delayed(_worker_address)(i) for i in range(10)) 519 assert seq == [b["address"]] * 10 520
529 sleep(0.01) 530 assert time() < start + 5 531
531 532 assert not client.futures 533
600 601 assert len(record) == 1 602 warning = record[0].message
602 warning = record[0].message 603 assert isinstance(warning, UserWarning) 604 assert "distributed.worker.daemon" in str(warning)
603 assert isinstance(warning, UserWarning) 604 assert "distributed.worker.daemon" in str(warning) 605 finally:
32 a.tofile(output) 33 assert disk_used(cachedir) >= target_size 34 assert disk_used(cachedir) < target_size + 12
33 assert disk_used(cachedir) >= target_size 34 assert disk_used(cachedir) < target_size + 12 35
46 def test_memstr_to_bytes(text, value): 47 assert memstr_to_bytes(text) == value 48
59 memstr_to_bytes(text) 60 assert excinfo.match(regex) 61
78 rm_subdirs(sub_path) 79 assert os.path.exists(sub_path) 80 assert not os.path.exists(full_path)
79 assert os.path.exists(sub_path) 80 assert not os.path.exists(full_path)
95 def test_filter_args(func, args, filtered_args): 96 assert filter_args(func, *args) == filtered_args 97
100 obj = Klass()
101 assert filter_args(obj.f, [], (1,)) == {"x": 1, "self": obj}
102
113 def test_filter_varargs(func, args, filtered_args): 114 assert filter_args(func, *args) == filtered_args 115
131 def test_filter_kwargs(func, args, filtered_args): 132 assert filter_args(func, *args) == filtered_args 133
135 def test_filter_args_2():
136 assert filter_args(j, [], (1, 2), {"ee": 2}) == {"x": 1, "y": 2, "**": {"ee": 2}}
137
139 # filter_args has to special-case partial
140 assert filter_args(ff, [], (1,)) == {"*": [1], "**": {}}
141 assert filter_args(ff, ["y"], (1,)) == {"*": [1], "**": {}}
140 assert filter_args(ff, [], (1,)) == {"*": [1], "**": {}}
141 assert filter_args(ff, ["y"], (1,)) == {"*": [1], "**": {}}
142
147 # here testcase 'cached_func' is the function itself 148 assert get_func_name(func)[1] == funcname 149
154 # returned by 'cached_func' fixture 155 assert get_func_name(cached_func)[1] == "cached_func_inner" 156
175 176 assert name == other_name 177 assert module != other_module
176 assert name == other_name 177 assert module != other_module 178
181 # Check that func_inspect is robust and will work on weird objects
182 assert get_func_name("a".lower)[-1] == "lower"
183 assert get_func_code("a".lower)[1:] == (None, -1)
182 assert get_func_name("a".lower)[-1] == "lower"
183 assert get_func_code("a".lower)[1:] == (None, -1)
184 ff = lambda x: x # noqa: E731
184 ff = lambda x: x # noqa: E731
185 assert get_func_name(ff, win_characters=False)[-1] == "<lambda>"
186 assert get_func_code(ff)[1] == __file__.replace(".pyc", ".py")
185 assert get_func_name(ff, win_characters=False)[-1] == "<lambda>"
186 assert get_func_code(ff)[1] == __file__.replace(".pyc", ".py")
187 # Simulate a function defined in __main__
188 ff.__module__ = "__main__"
189 assert get_func_name(ff, win_characters=False)[-1] == "<lambda>"
190 assert get_func_code(ff)[1] == __file__.replace(".pyc", ".py")
189 assert get_func_name(ff, win_characters=False)[-1] == "<lambda>"
190 assert get_func_code(ff)[1] == __file__.replace(".pyc", ".py")
191
201 def test_filter_args_edge_cases():
202 assert filter_args(func_with_kwonly_args, [], (1, 2), {"kw1": 3, "kw2": 4}) == {
203 "a": 1,
204 "b": 2,
205 "kw1": 3,
206 "kw2": 4,
207 }
208
214
215 assert filter_args(
216 func_with_kwonly_args, ["b", "kw2"], (1, 2), {"kw1": 3, "kw2": 4}
217 ) == {"a": 1, "kw1": 3}
218
218
219 assert filter_args(func_with_signature, ["b"], (1, 2)) == {"a": 1}
220
227 b = Klass() 228 assert filter_args(a.f, [], (1,)) != filter_args(b.f, [], (1,)) 229
266 filter_args(g, [], [], kwargs)
267 assert kwargs == {"x": 0}
268
273 for char in ("\\", ":", "<", ">", "!"):
274 assert char not in mangled_string
275
286 path, sgn_result = format_signature(func, *args, **kwargs) 287 assert sgn_result == sgn_expected 288
298 _, signature = format_signature(h, arg) 299 assert len(signature) < shortening_target 300
303 _, signature = format_signature(h, *args) 304 assert len(signature) < shortening_target * nb_args 305
307 _, signature = format_signature(h, **kwargs) 308 assert len(signature) < shortening_target * nb_args 309
310 _, signature = format_signature(h, *args, **kwargs) 311 assert len(signature) < shortening_target * 2 * nb_args 312
322 func_code, source_file, first_line = get_func_code(big5_f) 323 assert first_line == 5 324 assert "def big5_f():" in func_code
323 assert first_line == 5 324 assert "def big5_f():" in func_code 325 assert "test_func_inspect_special_encoding" in source_file
324 assert "def big5_f():" in func_code 325 assert "test_func_inspect_special_encoding" in source_file 326
337 codes = Parallel(n_jobs=2)(delayed(_get_code)() for _ in range(5)) 338 assert len(set(codes)) == 1
13 import itertools 14 import pickle 15 import random
115 are_objs_identical = obj1 is obj2 116 assert are_hashes_equal == are_objs_identical 117
121 a = io.StringIO(unicode("a"))
122 assert hash(a.flush) == hash(a.flush)
123 a1 = collections.deque(range(10))
124 a2 = collections.deque(range(9)) 125 assert hash(a1.extend) != hash(a2.extend) 126
144 are_arrays_equal = np.all(obj1 == obj2) 145 assert are_hashes_equal == are_arrays_equal 146
146 147 assert hash(arr1) != hash(arr1.T) 148
156 157 assert hash(d1) == hash(d2) 158 assert hash(d1) != hash(d3)
157 assert hash(d1) == hash(d2) 158 assert hash(d1) != hash(d3) 159
167 array = np.arange(0, 10, dtype=dtype) 168 assert hash(array) != a_hash 169
174 b = np.ascontiguousarray(a) 175 assert hash(a) != hash(b) 176
177 c = np.asfortranarray(a) 178 assert hash(a) != hash(c) 179
191 ) 192 assert are_hashes_equal == coerce_mmap 193 finally:
227 def md5_hash(x): 228 return hashlib.md5(memoryview(x)).hexdigest() 229
230 relative_diff = relative_time(md5_hash, hash, a) 231 assert relative_diff < 0.3 232
237 relative_diff = 0.5 * (abs(time_hash - time_hashlib) / (time_hash + time_hashlib)) 238 assert relative_diff < 0.3 239
246 b = Klass() 247 assert hash(filter_args(a.f, [], (1,))) == hash(filter_args(b.f, [], (1,))) 248
255 b = KlassWithCachedMethod(tmpdir.strpath) 256 assert hash(filter_args(a.f.func, [], (1,))) == hash( 257 filter_args(b.f.func, [], (1,)) 258 ) 259
267 268 assert hash(a) == hash(b) 269
276 b = np.float64(3.0) 277 assert hash(a) != hash(b) 278
303 304 assert hash(a) == hash(b) 305
332 333 assert hash(a) == hash(b) 334
338 # ordering is not guaranteed
339 assert hash(set([Decimal(0), Decimal("NaN")])) == hash(
340 set([Decimal("NaN"), Decimal(0)])
341 )
342
349 b = {string: "bar"}
350 c = pickle.loads(pickle.dumps(b))
351 assert hash([a, b]) == hash([a, c])
350 c = pickle.loads(pickle.dumps(b)) 351 assert hash([a, b]) == hash([a, c]) 352
366 # simple dtypes objects are interned 367 assert dt1 is dt2 368 assert hash(dt1) == hash(dt2)
367 assert dt1 is dt2 368 assert hash(dt1) == hash(dt2) 369
369 370 dt1_roundtripped = pickle.loads(pickle.dumps(dt1)) 371 assert dt1 is not dt1_roundtripped
370 dt1_roundtripped = pickle.loads(pickle.dumps(dt1)) 371 assert dt1 is not dt1_roundtripped 372 assert hash(dt1) == hash(dt1_roundtripped)
371 assert dt1 is not dt1_roundtripped 372 assert hash(dt1) == hash(dt1_roundtripped) 373
373 374 assert hash([dt1, dt1]) == hash([dt1_roundtripped, dt1_roundtripped]) 375 assert hash([dt1, dt1]) == hash([dt1, dt1_roundtripped])
374 assert hash([dt1, dt1]) == hash([dt1_roundtripped, dt1_roundtripped]) 375 assert hash([dt1, dt1]) == hash([dt1, dt1_roundtripped]) 376
380 # complex dtypes objects are not interned 381 assert hash(complex_dt1) == hash(complex_dt2) 382
382 383 complex_dt1_roundtripped = pickle.loads(pickle.dumps(complex_dt1)) 384 assert complex_dt1_roundtripped is not complex_dt1
383 complex_dt1_roundtripped = pickle.loads(pickle.dumps(complex_dt1)) 384 assert complex_dt1_roundtripped is not complex_dt1 385 assert hash(complex_dt1) == hash(complex_dt1_roundtripped)
384 assert complex_dt1_roundtripped is not complex_dt1 385 assert hash(complex_dt1) == hash(complex_dt1_roundtripped) 386
386 387 assert hash([complex_dt1, complex_dt1]) == hash( 388 [complex_dt1_roundtripped, complex_dt1_roundtripped] 389 ) 390 assert hash([complex_dt1, complex_dt1]) == hash(
389 ) 390 assert hash([complex_dt1, complex_dt1]) == hash( 391 [complex_dt1_roundtripped, complex_dt1] 392 ) 393
413 # Expected results have been generated with joblib 0.9.2 414 assert hash(to_hash) == expected 415
401 ( 402 [random.Random(42).random() for _ in range(5)], 403 "a11ffad81f9682a7d901e6edc3d16c84",
423 arr_f = np.asfortranarray(arr_c) 424 assert hash(arr_c) != hash(arr_f) 425
433 def test_0d_and_1d_array_hashing_is_different(): 434 assert hash(np.array(0)) != hash(np.array([0])) 435
495 hash_2 = e2.submit(hash, obj_1).result() 496 assert hash_1 == hash_2 497
499 hash_3 = e1.submit(hash, obj_2).result() 500 assert hash_1 == hash_3 501
14 # rely on setting up the variable above 15 assert _top_import_error is None
5 import os 6 import pickle 7 import platform
7 import platform 8 import subprocess 9 import sys
46 def check_memmap_and_send_back(array): 47 assert _get_backing_memmap(array) is not None 48 return array
71 data, position, expected = args 72 assert data[position] == expected 73 data[position] *= 2
120 a_reconstructed = reconstruct_array_or_memmap(a) 121 assert has_shareable_memory(a_reconstructed) 122 assert isinstance(a_reconstructed, np.memmap)
121 assert has_shareable_memory(a_reconstructed) 122 assert isinstance(a_reconstructed, np.memmap) 123 assert_array_equal(a_reconstructed, a)
126 b_reconstructed = reconstruct_array_or_memmap(b) 127 assert has_shareable_memory(b_reconstructed) 128 assert_array_equal(b_reconstructed, b)
131 b2_reconstructed = reconstruct_array_or_memmap(b2) 132 assert has_shareable_memory(b2_reconstructed) 133 assert_array_equal(b2_reconstructed, b2)
136 c_reconstructed = reconstruct_array_or_memmap(c) 137 assert not isinstance(c_reconstructed, np.memmap) 138 assert has_shareable_memory(c_reconstructed)
137 assert not isinstance(c_reconstructed, np.memmap) 138 assert has_shareable_memory(c_reconstructed) 139 assert_array_equal(c_reconstructed, c)
141 d_reconstructed = reconstruct_array_or_memmap(d) 142 assert not isinstance(d_reconstructed, np.memmap) 143 assert has_shareable_memory(d_reconstructed)
142 assert not isinstance(d_reconstructed, np.memmap) 143 assert has_shareable_memory(d_reconstructed) 144 assert_array_equal(d_reconstructed, d)
148 a3 = a * 3 149 assert not has_shareable_memory(a3) 150 a3_reconstructed = reconstruct_array_or_memmap(a3)
150 a3_reconstructed = reconstruct_array_or_memmap(a3) 151 assert not has_shareable_memory(a3_reconstructed) 152 assert not isinstance(a3_reconstructed, np.memmap)
151 assert not has_shareable_memory(a3_reconstructed) 152 assert not isinstance(a3_reconstructed, np.memmap) 153 assert_array_equal(a3_reconstructed, a * 3)
156 b3 = np.asarray(a3) 157 assert not has_shareable_memory(b3) 158
159 b3_reconstructed = reconstruct_array_or_memmap(b3) 160 assert isinstance(b3_reconstructed, np.ndarray) 161 assert not has_shareable_memory(b3_reconstructed)
160 assert isinstance(b3_reconstructed, np.ndarray) 161 assert not has_shareable_memory(b3_reconstructed) 162 assert_array_equal(b3_reconstructed, b3)
203 """.format(filename=filename) 204 p = subprocess.Popen( 205 [sys.executable, "-c", cmd], stderr=subprocess.PIPE, stdout=subprocess.PIPE 206 ) 207 p.wait()
208 out, err = p.communicate() 209 assert p.returncode == 0, err.decode() 210 assert out == b""
209 assert p.returncode == 0, err.decode()
210 assert out == b""
211 msg = "tried to unlink {}, got PermissionError".format(filename)
211 msg = "tried to unlink {}, got PermissionError".format(filename)
212 assert msg in err.decode()
213
239 a_reconstructed = reconstruct_array_or_memmap(a) 240 assert has_shareable_memory(a_reconstructed) 241 assert isinstance(a_reconstructed, np.memmap)
240 assert has_shareable_memory(a_reconstructed) 241 assert isinstance(a_reconstructed, np.memmap) 242 assert_array_equal(a_reconstructed, a)
244 b_reconstructed = reconstruct_array_or_memmap(b) 245 assert has_shareable_memory(b_reconstructed) 246 assert_array_equal(b_reconstructed, b)
248 c_reconstructed = reconstruct_array_or_memmap(c) 249 assert has_shareable_memory(c_reconstructed) 250 assert_array_equal(c_reconstructed, c)
252 d_reconstructed = reconstruct_array_or_memmap(d) 253 assert has_shareable_memory(d_reconstructed) 254 assert_array_equal(d_reconstructed, d)
256 e_reconstructed = reconstruct_array_or_memmap(e) 257 assert has_shareable_memory(e_reconstructed) 258 assert_array_equal(e_reconstructed, e)
279 ) 280 assert isinstance(memmap_obj, np.memmap) 281 assert memmap_obj.offset == offset
280 assert isinstance(memmap_obj, np.memmap) 281 assert memmap_obj.offset == offset 282 memmap_backed_obj = _strided_from_memmap(
292 ) 293 assert _get_backing_memmap(memmap_backed_obj).offset == offset 294
331 # of new files on the FS 332 assert os.listdir(pool_temp_folder) == [] 333
374 a_view = np.asarray(a) 375 assert not isinstance(a_view, np.memmap) 376 assert has_shareable_memory(a_view)
375 assert not isinstance(a_view, np.memmap) 376 assert has_shareable_memory(a_view) 377
392 # creation of new files on the FS 393 assert os.listdir(pool_temp_folder) == [] 394
427 """.format(b=backend) 428 p = subprocess.Popen( 429 [sys.executable, "-c", cmd], stderr=subprocess.PIPE, stdout=subprocess.PIPE 430 ) 431 p.wait()
432 out, err = p.communicate() 433 assert p.returncode == 0, out.decode() + "\n\n" + err.decode() 434
470 env["PYTHONPATH"] = os.path.dirname(__file__) 471 p = subprocess.Popen( 472 [sys.executable, "-c", cmd], 473 stderr=subprocess.PIPE, 474 stdout=subprocess.PIPE, 475 env=env, 476 ) 477 p.wait()
478 out, err = p.communicate() 479 assert p.returncode == 0, err 480 assert out == b""
479 assert p.returncode == 0, err 480 assert out == b"" 481 assert b"resource_tracker" not in err
480 assert out == b"" 481 assert b"resource_tracker" not in err 482
496 ) 497 assert os.path.dirname(filename_2) != os.path.dirname(filename_1) 498
508 [filename_2] = p(delayed(getattr)(array, "filename") for _ in range(1)) 509 assert os.path.dirname(filename_2) == os.path.dirname(filename_1) 510
543 544 assert len(temp_dirs_thread_1) == 1 545 assert len(temp_dirs_thread_2) == 1
544 assert len(temp_dirs_thread_1) == 1 545 assert len(temp_dirs_thread_2) == 1 546
546 547 assert temp_dirs_thread_1 != temp_dirs_thread_2 548
613 for f1, f2, returncode in functions_and_returncodes: 614 p = subprocess.Popen( 615 [sys.executable, "-c", cmd.format(f1=f1, f2=f2)], 616 stderr=subprocess.PIPE, 617 stdout=subprocess.PIPE, 618 ) 619 p.wait()
620 _, err = p.communicate() 621 assert p.returncode == returncode, err.decode() 622 assert b"resource_tracker" not in err, err.decode()
621 assert p.returncode == returncode, err.decode() 622 assert b"resource_tracker" not in err, err.decode() 623
654 env["PYTHONPATH"] = os.path.dirname(__file__) 655 p = subprocess.Popen( 656 [sys.executable, "-c", cmd], 657 stderr=subprocess.PIPE, 658 stdout=subprocess.PIPE, 659 env=env, 660 ) 661 p.wait()
662 out, err = p.communicate() 663 assert p.returncode == 0, err.decode() 664 assert out == b"", out.decode()
663 assert p.returncode == 0, err.decode() 664 assert out == b"", out.decode() 665 assert b"resource_tracker" not in err
664 assert out == b"", out.decode() 665 assert b"resource_tracker" not in err 666
676 ) 677 assert _get_backing_memmap(result) is None 678
724 """.format(b=backend) 725 p = subprocess.Popen( 726 [sys.executable, "-c", cmd], stderr=subprocess.PIPE, stdout=subprocess.PIPE 727 ) 728 p.wait()
731 err = err.decode() 732 assert p.returncode == 0, out + "\n\n" + err 733 assert "resource_tracker" not in err, err
732 assert p.returncode == 0, out + "\n\n" + err 733 assert "resource_tracker" not in err, err 734
746 # Check that the tempfolder is empty 747 assert os.listdir(tmpdir.strpath) == [] 748
753 # The temporary folder for the pool is not provisioned in advance 754 assert os.listdir(tmpdir.strpath) == [] 755 assert not os.path.exists(p._temp_folder)
754 assert os.listdir(tmpdir.strpath) == [] 755 assert not os.path.exists(p._temp_folder) 756
757 small = np.ones(5, dtype=np.float32) 758 assert small.nbytes == 20 759 p.map(check_array, [(small, i, 1.0) for i in range(small.shape[0])])
761 # Memory has been copied, the pool filesystem folder is unused 762 assert os.listdir(tmpdir.strpath) == [] 763
765 large = np.ones(100, dtype=np.float64) 766 assert large.nbytes == 800 767 p.map(check_array, [(large, i, 1.0) for i in range(large.shape[0])])
770 # without per-child memory copies 771 assert os.path.isdir(p._temp_folder) 772 dumped_filenames = os.listdir(p._temp_folder)
772 dumped_filenames = os.listdir(p._temp_folder) 773 assert len(dumped_filenames) == 1 774
778 results = p.map(has_shareable_memory, [objects]) 779 assert not results[0] 780
871 env["PYTHONPATH"] = os.path.dirname(__file__) 872 p = subprocess.Popen( 873 [sys.executable, "-c", cmd], 874 stderr=subprocess.PIPE, 875 stdout=subprocess.PIPE, 876 env=env, 877 ) 878 p.wait()
881 filename = out.split("\n")[0]
882 assert p.returncode == 0, err or out
883 assert err == "" # no resource_tracker warnings.
882 assert p.returncode == 0, err or out 883 assert err == "" # no resource_tracker warnings. 884 assert not os.path.exists(filename)
883 assert err == "" # no resource_tracker warnings. 884 assert not os.path.exists(filename) 885
899 # Check that the tempfolder is empty 900 assert os.listdir(tmpdir.strpath) == [] 901
903 large = np.ones(100, dtype=np.float64) 904 assert large.nbytes == 800 905 p.map(check_array, [(large, i, 1.0) for i in range(large.shape[0])])
907 # Check that the tempfolder is still empty 908 assert os.listdir(tmpdir.strpath) == [] 909
936 pool_temp_folder = p._temp_folder 937 folder_prefix = "/dev/shm/joblib_memmapping_folder_" 938 assert pool_temp_folder.startswith(folder_prefix)
937 folder_prefix = "/dev/shm/joblib_memmapping_folder_" 938 assert pool_temp_folder.startswith(folder_prefix) 939 assert os.path.exists(pool_temp_folder)
938 assert pool_temp_folder.startswith(folder_prefix) 939 assert os.path.exists(pool_temp_folder) 940
942 a = np.ones(100, dtype=np.float64) 943 assert a.nbytes == 800 944 p.map(id, [a] * 10)
946 # pickling procedure generate one .pkl file: 947 assert len(os.listdir(pool_temp_folder)) == 1 948
952 b = np.ones(100, dtype=np.float64) * 2 953 assert b.nbytes == 800 954 p.map(id, [b] * 10)
955 # A copy of both a and b are now stored in the shared memory folder 956 assert len(os.listdir(pool_temp_folder)) == 2 957 finally:
992 pool_temp_folder = p._temp_folder
993 assert not pool_temp_folder.startswith("/dev/shm")
994 finally:
992 pool_temp_folder = p._temp_folder
993 assert not pool_temp_folder.startswith("/dev/shm")
994 finally:
999 # The temp folder is cleaned up upon pool termination 1000 assert not os.path.exists(pool_temp_folder) 1001 finally:
1026 large = res.get() 1027 assert not has_shareable_memory(large) 1028 assert_array_equal(large, np.ones(1000))
1035 """Multiplication function to be executed by subprocess""" 1036 assert has_shareable_memory(a) 1037 return a * n_times
1063 b = p.apply_async(_worker_multiply, args=(a, 3)).get() 1064 assert not has_shareable_memory(b) 1065 assert_array_equal(b, 3 * a)
1100 result = p.apply_async(identity, args=(obj,)).get() 1101 assert isinstance(result, np.memmap) 1102 assert result.offset == offset
1101 assert isinstance(result, np.memmap) 1102 assert result.offset == offset 1103 np.testing.assert_array_equal(obj, result)
1109 pool_folder, shared_mem = _get_temp_dir(pool_folder_name, tmpdir.strpath)
1110 assert shared_mem is False
1111 assert pool_folder == tmpdir.join("test.tmpdir").strpath
1110 assert shared_mem is False
1111 assert pool_folder == tmpdir.join("test.tmpdir").strpath
1112
1114 if sys.platform.startswith("win"):
1115 assert shared_mem is False
1116 assert pool_folder.endswith(pool_folder_name)
1115 assert shared_mem is False 1116 assert pool_folder.endswith(pool_folder_name) 1117
1132 if sys.platform.startswith("win"):
1133 assert shared_mem is False
1134 assert pool_folder.endswith(pool_folder_name)
1133 assert shared_mem is False 1134 assert pool_folder.endswith(pool_folder_name) 1135
1164 sleep(0.1) 1165 assert len(container) == 0 1166
1169 m.set(a, "a") 1170 assert m.get(a) == "a" 1171
1172 b = a 1173 assert m.get(b) == "a" 1174 m.set(b, "b")
1174 m.set(b, "b") 1175 assert m.get(a) == "b" 1176
1178 gc.collect() 1179 assert len(m._data) == 1 1180 assert m.get(b) == "b"
1179 assert len(m._data) == 1 1180 assert m.get(b) == "b" 1181
1186 m.set(c, "c") 1187 assert len(m._data) == 1 1188 assert m.get(c) == "c"
1187 assert len(m._data) == 1 1188 assert m.get(c) == "c" 1189
1202 m.set(a, i) 1203 assert m.get(a) == i 1204 return id(a)
1213 max_len_unique_ids = 400 if IS_GIL_DISABLED else 100 1214 assert len(unique_ids) < max_len_unique_ids 1215
1274 results = Parallel(n_jobs=2)(delayed(func)(x) for x in [arr]) 1275 assert not results[0].flags["F_CONTIGUOUS"] 1276 np.testing.assert_array_equal(results, ref)
15 import pathlib 16 import pickle 17 import shutil
64 for _ in range(2): 65 assert func(i) == i 66 assert len(accumulator) == i + 1
65 assert func(i) == i 66 assert len(accumulator) == i + 1 67
125 126 assert len(accumulator) == current_accumulator + 1 127 # Also, check that Memory.eval works similarly
127 # Also, check that Memory.eval works similarly 128 assert memory.eval(f, 1) == out 129 assert len(accumulator) == current_accumulator + 1
128 assert memory.eval(f, 1) == out 129 assert len(accumulator) == current_accumulator + 1 130
163 my_locals = {}
164 exec(
165 compile(
166 textwrap.dedent(ipython_cell_source),
167 filename=ipython_cell_id,
168 mode="exec",
169 ),
170 # TODO when Python 3.11 is the minimum supported version, use
171 # locals=my_locals instead of passing globals and locals in the
172 # next two lines as positional arguments
173 None,
174 my_locals,
175 )
176 f = my_locals["f"]
180 # identified f as an interactive function defined in a jupyter notebook 181 assert f(1) == 1 182 assert f.__code__.co_filename == ipython_cell_id
181 assert f(1) == 1 182 assert f.__code__.co_filename == ipython_cell_id 183
186 187 assert len(os.listdir(tmpdir / "joblib")) == 1 188 f_cache_relative_directory = os.listdir(tmpdir / "joblib")[0]
188 f_cache_relative_directory = os.listdir(tmpdir / "joblib")[0] 189 assert "ipython-input" in f_cache_relative_directory 190
194 # The cache should be empty as cached_f has not been called yet. 195 assert os.listdir(f_cache_directory) == ["f"] 196 assert os.listdir(f_cache_directory / "f") == []
195 assert os.listdir(f_cache_directory) == ["f"] 196 assert os.listdir(f_cache_directory / "f") == [] 197
202 # cached_f(3) 203 assert len(os.listdir(f_cache_directory / "f")) == 2 204
224 time.sleep(0.2) # pragma: no cover 225 assert len(os.listdir(f_cache_directory / "f")) == 3 226
231 # and 'func_code.py' 232 assert len(os.listdir(f_cache_directory / "f")) == 4 233 else:
234 # For the second session, there should be an already existing cache 235 assert len(os.listdir(f_cache_directory / "f")) == 4 236
240 # function in a new session 241 assert len(os.listdir(f_cache_directory / "f")) == 4 242
256 gg(1) 257 assert len(accumulator) == current_accumulator + 1 258
272 # Smoke test with an explicit keyword argument: 273 assert g(arg1=30, arg2=2) == 30 274
309 310 assert len(warninfo) == 1 311 assert "collision" in str(warninfo[0].message)
310 assert len(warninfo) == 1 311 assert "collision" in str(warninfo[0].message) 312
320 with warns(JobLibCollisionWarning) as warninfo: 321 assert a(0) == 0 322 assert b(1) == 2
321 assert a(0) == 0 322 assert b(1) == 2 323 assert a(1) == 1
322 assert b(1) == 2 323 assert a(1) == 1 324
326 # thus nothing is raised 327 assert len(warninfo) == 4 328
333 memory = Memory(location=tmpdir.strpath, verbose=0)
334 a1 = eval("lambda x: x")
335 a1 = memory.cache(a1)
335 a1 = memory.cache(a1)
336 b1 = eval("lambda x: x+1")
337 b1 = memory.cache(b1)
343 344 assert len(warninfo) == 2 345 assert "cannot detect" in str(warninfo[0].message).lower()
344 assert len(warninfo) == 2 345 assert "cannot detect" in str(warninfo[0].message).lower() 346
367
368 m = eval("lambda x: x")
369 mm = memory.cache(m)
370 371 assert mm(1) == 1 372
391 # argument x=[] 392 assert func() == 0 393 # the second time the argument is x=[None], which is not cached
394 # yet, so the functions should be called a second time 395 assert func() == 1 396
414 for _ in range(3): 415 assert np.all(cached_n(a) == a) 416 assert len(accumulator) == i + 1
415 assert np.all(cached_n(a) == a) 416 assert len(accumulator) == i + 1 417
433 434 assert isinstance(c, np.memmap) 435 assert c.mode == "r"
434 assert isinstance(c, np.memmap) 435 assert c.mode == "r" 436
436 437 assert isinstance(b, np.memmap) 438 assert b.mode == "r"
437 assert isinstance(b, np.memmap) 438 assert b.mode == "r" 439
450 d = twice(a) 451 assert len(recorded_warnings) == 1 452 exception_msg = "Exception while loading results"
452 exception_msg = "Exception while loading results" 453 assert exception_msg in recorded_warnings[0] 454 # Asserts that the recomputation returns a mmap
454 # Asserts that the recomputation returns a mmap 455 assert isinstance(d, np.memmap) 456 assert d.mode == "r"
455 assert isinstance(d, np.memmap) 456 assert d.mode == "r" 457
488 489 assert z.ignore == ["y"] 490
491 z(0, y=1) 492 assert len(accumulator) == 1 493 z(0, y=1)
493 z(0, y=1) 494 assert len(accumulator) == 1 495 z(0, y=2)
495 z(0, y=2) 496 assert len(accumulator) == 1 497
515 516 assert z.ignore == ["y"] 517
518 z(0, y=1) 519 assert len(accumulator) == 1 520 z(0, y=1)
520 z(0, y=1) 521 assert len(accumulator) == 1 522 z(0, y=2)
522 z(0, y=2) 523 assert len(accumulator) == 1 524
537 # It's possible to call a positional arg as a kwarg. 538 assert plus_one(1) == 2 539 assert plus_one(a=1) == 2
538 assert plus_one(1) == 2 539 assert plus_one(a=1) == 2 540
542 # before would cause a failure if it was passed as a kwarg. 543 assert plus_one(a=2) == 3 544
554 555 assert z.ignore == ignore 556 assert z._verbose == verbose
555 assert z.ignore == ignore 556 assert z._verbose == verbose 557 assert z.mmap_mode == mmap_mode
556 assert z._verbose == verbose 557 assert z.mmap_mode == mmap_mode 558
570 location = os.path.join(g.store_backend.location, func_id) 571 assert location == path 572 assert os.path.exists(path)
571 assert location == path 572 assert os.path.exists(path) 573 assert memory.location == os.path.dirname(g.store_backend.location)
572 assert os.path.exists(path) 573 assert memory.location == os.path.dirname(g.store_backend.location) 574
578 _FUNCTION_HASHES.clear() 579 assert not g._check_previous_func_code() 580 assert os.path.exists(os.path.join(path, "func_code.py"))
579 assert not g._check_previous_func_code() 580 assert os.path.exists(os.path.join(path, "func_code.py")) 581 assert g._check_previous_func_code()
580 assert os.path.exists(os.path.join(path, "func_code.py")) 581 assert g._check_previous_func_code() 582
586 a = g(1) 587 assert os.path.exists(output_dir) 588 os.remove(os.path.join(output_dir, "output.pkl"))
588 os.remove(os.path.join(output_dir, "output.pkl")) 589 assert a == g(1) 590
597 598 h = pickle.loads(pickle.dumps(g)) 599
601 output_dir = os.path.join(h.store_backend.location, h.func_id, args_id) 602 assert os.path.exists(output_dir) 603 assert output == h.store_backend.load_item([h.func_id, args_id])
602 assert os.path.exists(output_dir) 603 assert output == h.store_backend.load_item([h.func_id, args_id]) 604 memory2 = pickle.loads(pickle.dumps(memory))
603 assert output == h.store_backend.load_item([h.func_id, args_id]) 604 memory2 = pickle.loads(pickle.dumps(memory)) 605 assert memory.store_backend.location == memory2.store_backend.location
604 memory2 = pickle.loads(pickle.dumps(memory)) 605 assert memory.store_backend.location == memory2.store_backend.location 606
608 memory = Memory(location=None, verbose=0) 609 pickle.loads(pickle.dumps(memory)) 610 g = memory.cache(f)
610 g = memory.cache(f) 611 gp = pickle.loads(pickle.dumps(g)) 612 gp(1)
625 result = func.check_call_in_cache(2) 626 assert isinstance(result, bool) 627 assert not result
626 assert isinstance(result, bool) 627 assert not result 628 assert func(2) == 5
627 assert not result 628 assert func(2) == 5 629 result = func.check_call_in_cache(2)
629 result = func.check_call_in_cache(2) 630 assert isinstance(result, bool) 631 assert result == consider_cache_valid
630 assert isinstance(result, bool) 631 assert result == consider_cache_valid 632 func.clear()
634 func = NotMemorizedFunc(f) 635 assert not func.check_call_in_cache(2) 636
649 ): 650 assert func(2) == 5 651 result = func.call_and_shelve(2)
651 result = func.call_and_shelve(2) 652 assert isinstance(result, Result) 653 assert result.get() == 5
652 assert isinstance(result, Result) 653 assert result.get() == 5 654
668 time.sleep(0.5) 669 assert test_access_time_file.read() == "test_access" 670
681 ) 682 assert func(2) == 5 683 first_access_time = os.stat(result_path).st_atime
687 result = func.call_and_shelve(2) 688 assert isinstance(result, MemorizedResult) 689 assert os.stat(result_path).st_atime == first_access_time
688 assert isinstance(result, MemorizedResult) 689 assert os.stat(result_path).st_atime == first_access_time 690 time.sleep(1)
692 # Read the stored data => last access time is greater than first_access 693 assert result.get() == 5 694 assert os.stat(result_path).st_atime > first_access_time
693 assert result.get() == 5 694 assert os.stat(result_path).st_atime > first_access_time 695
703 with open(filename, "rb") as fp: 704 result2 = pickle.load(fp) 705 assert result2.get() == result.get()
704 result2 = pickle.load(fp) 705 assert result2.get() == result.get() 706 os.remove(filename)
714 result2 = func2.call_and_shelve(2) 715 assert result.get() == result2.get() 716 assert repr(func) == repr(func2)
715 assert result.get() == result2.get() 716 assert repr(func) == repr(func2) 717
794 out, err = capsys.readouterr() 795 assert out == "1\n2\nReloading\nx=1\n" 796
816 817 assert f(1, 2) == 3 818 assert f(1, 2) == 3
817 assert f(1, 2) == 3 818 assert f(1, 2) == 3 819
822 _function_to_cache.__code__ = _product.__code__ 823 assert f(1, 2) == 2 824 assert f(1, 2) == 2
823 assert f(1, 2) == 2 824 assert f(1, 2) == 2 825
843 844 assert func_cached(1, 2, kw1=3) == (1, 2, 3, "kw2") 845
861 func_cached = memory.cache(func_with_kwonly_args, ignore=["kw2"]) 862 assert func_cached(1, 2, kw1=3, kw2=4) == (1, 2, 3, 4) 863 assert func_cached(1, 2, kw1=3, kw2="ignored") == (1, 2, 3, 4)
862 assert func_cached(1, 2, kw1=3, kw2=4) == (1, 2, 3, 4) 863 assert func_cached(1, 2, kw1=3, kw2="ignored") == (1, 2, 3, 4) 864
869 870 assert func_cached(1, 2.0) == 3.0 871
897 hash_dirs = [ci.path for ci in items] 898 assert set(hash_dirs) == set(expected_hash_dirs) 899
905 hash_cache_sizes = [ci.size for ci in items] 906 assert hash_cache_sizes == expected_hash_cache_sizes 907
913 last_accesses = [ci.last_access for ci in items] 914 assert last_accesses == expected_last_accesses 915
920 items_to_delete = memory.store_backend._get_items_to_delete("1K")
921 assert items_to_delete == []
922
928 nb_hashes = len(expected_hash_cachedirs) 929 assert set.issubset(set(items_to_delete), set(items)) 930 assert len(items_to_delete) == nb_hashes - 1
929 assert set.issubset(set(items_to_delete), set(items)) 930 assert len(items_to_delete) == nb_hashes - 1 931
933 items_to_delete_2048b = memory.store_backend._get_items_to_delete(2048) 934 assert sorted(items_to_delete) == sorted(items_to_delete_2048b) 935
937 items_to_delete_empty = memory.store_backend._get_items_to_delete("1M")
938 assert items_to_delete_empty == []
939
944 ) 945 assert set(items_to_delete_500b), set(items) 946
951 952 assert max(ci.last_access for ci in items_to_delete_6000b) <= min( 953 ci.last_access for ci in surviving_items 954 ) 955
963 cache_items = memory.store_backend.get_items() 964 assert sorted(ref_cache_items) == sorted(cache_items) 965
969 cache_items = memory.store_backend.get_items() 970 assert sorted(ref_cache_items) == sorted(cache_items) 971
974 cache_items = memory.store_backend.get_items() 975 assert set.issubset(set(cache_items), set(ref_cache_items)) 976 assert len(cache_items) == 2
975 assert set.issubset(set(cache_items), set(ref_cache_items)) 976 assert len(cache_items) == 2 977
981 cache_items = memory.store_backend.get_items() 982 assert cache_items == [] 983
991 cache_items = memory.store_backend.get_items() 992 assert sorted(ref_cache_items) == sorted(cache_items) 993
997 cache_items = memory.store_backend.get_items() 998 assert sorted(ref_cache_items) == sorted(cache_items) 999
1002 cache_items = memory.store_backend.get_items() 1003 assert set.issubset(set(cache_items), set(ref_cache_items)) 1004 assert len(cache_items) == 2
1003 assert set.issubset(set(cache_items), set(ref_cache_items)) 1004 assert len(cache_items) == 2 1005
1008 cache_items = memory.store_backend.get_items() 1009 assert cache_items == [] 1010
1021 cache_items = memory.store_backend.get_items() 1022 assert sorted(ref_cache_items) == sorted(cache_items) 1023
1026 cache_items = memory.store_backend.get_items() 1027 assert sorted(ref_cache_items) == sorted(cache_items) 1028
1034 cache_items = memory.store_backend.get_items() 1035 assert not set.issubset(set(cache_items), set(ref_cache_items)) 1036 assert len(cache_items) == 2
1035 assert not set.issubset(set(cache_items), set(ref_cache_items)) 1036 assert len(cache_items) == 2 1037
1045 cache_items = memory.store_backend.get_items() 1046 assert cache_items == [] 1047
1052 1053 assert os.listdir(memory.store_backend.location) == [] 1054
1055 # Check that the cache for functions hash is also reset. 1056 assert not g._check_previous_func_code(stacklevel=4) 1057
1084 exception_msg = "Exception while loading results" 1085 assert exception_msg not in stdout 1086 assert exception_msg not in stderr
1085 assert exception_msg not in stdout 1086 assert exception_msg not in stderr 1087
1108 exception_msg = "Exception while loading results" 1109 assert exception_msg not in stdout 1110 assert exception_msg not in stderr
1109 assert exception_msg not in stdout 1110 assert exception_msg not in stderr 1111
1127 # Make sure the function is correctly cached 1128 assert arg == input_arg 1129
1137 recomputed_arg, recomputed_timestamp = cached_func(arg) 1138 assert len(recorded_warnings) == 1 1139 exception_msg = "Exception while loading results"
1139 exception_msg = "Exception while loading results" 1140 assert exception_msg in recorded_warnings[0] 1141 assert recomputed_arg == arg
1140 assert exception_msg in recorded_warnings[0] 1141 assert recomputed_arg == arg 1142 assert recomputed_timestamp > timestamp
1141 assert recomputed_arg == arg 1142 assert recomputed_timestamp > timestamp 1143
1154 message = "is corrupted" 1155 assert message in str(e.args) 1156
1218 with raises(TypeError) as excinfo: 1219 Memory(location="/tmp/joblib", backend="unknown") 1220 excinfo.match(r"Unknown location*")
1236 ) 1237 assert expected_mesage in str(warninfo[0].message) 1238
1244 register_store_backend(backend_name, IncompleteStoreBackend) 1245 assert (backend_name, IncompleteStoreBackend) in _STORE_BACKENDS.items() 1246 with raises(TypeError) as excinfo:
1258 register_store_backend(backend_name, DummyStoreBackend) 1259 assert (backend_name, DummyStoreBackend) in _STORE_BACKENDS.items() 1260
1261 backend_obj = _store_backend_factory(backend_name, "dummy_location") 1262 assert isinstance(backend_obj, DummyStoreBackend) 1263
1269 try: 1270 assert backend_obj.location == "some_folder" 1271 finally: # remove cache folder after test
1279 backend = FileSystemStoreBackend() 1280 assert backend.location is None 1281
1283 1284 assert str(backend) == repr_pattern.format(location=None) 1285
1289 1290 assert str(backend) == repr_pattern.format(location=tmpdir.strpath) 1291
1305 1306 assert str(memorized_func) == memorized_func_repr.format( 1307 func=my_func, location=memory.store_backend.location 1308 ) 1309
1315 1316 assert str(memorized_result) == memorized_result_repr.format( 1317 location=memory.store_backend.location, 1318 func=memorized_result.func_id, 1319 args_id=memorized_result.args_id, 1320 ) 1321
1321
1322 assert str(memory) == "Memory(location={location})".format(
1323 location=memory.store_backend.location
1324 )
1325
1339 memorized_result_pickle = pickle.dumps(memorized_result) 1340 memorized_result_loads = pickle.loads(memorized_result_pickle) 1341
1341 1342 assert ( 1343 memorized_result.store_backend.location 1344 == memorized_result_loads.store_backend.location 1345 ) 1346 assert memorized_result.func == memorized_result_loads.func
1345 ) 1346 assert memorized_result.func == memorized_result_loads.func 1347 assert memorized_result.args_id == memorized_result_loads.args_id
1346 assert memorized_result.func == memorized_result_loads.func 1347 assert memorized_result.args_id == memorized_result_loads.args_id 1348 assert str(memorized_result) == str(memorized_result_loads)
1347 assert memorized_result.args_id == memorized_result_loads.args_id 1348 assert str(memorized_result) == str(memorized_result_loads) 1349
1356 right_vars = vars(right) 1357 assert set(left_vars.keys()) == set(right_vars.keys()) 1358 for attr in left_vars.keys():
1360 continue 1361 assert left_vars[attr] == right_vars[attr] 1362
1373 1374 memory_reloaded = pickle.loads(pickle.dumps(memory)) 1375
1382 ) 1383 assert hash(memory) == hash(memory_reloaded) 1384
1386 1387 func_cached_reloaded = pickle.loads(pickle.dumps(func_cached)) 1388
1395 ) 1396 assert hash(func_cached) == hash(func_cached_reloaded) 1397
1399 memorized_result = func_cached.call_and_shelve(1) 1400 memorized_result_reloaded = pickle.loads(pickle.dumps(memorized_result)) 1401
1407 ) 1408 assert hash(memorized_result) == hash(memorized_result_reloaded) 1409
1421 _ = f(x) 1422 assert "Querying" in caplog.text 1423 caplog.clear()
1431 _ = f(x) 1432 assert "Querying" not in caplog.text 1433 caplog.clear()
1460 d1, d2 = {"run": False}, {"run": False}
1461 assert f(2, d1) == 4
1462 assert f(2, d2) == 4
1461 assert f(2, d1) == 4 1462 assert f(2, d2) == 4 1463
1463 1464 assert d1["run"] 1465 assert d2["run"] != consider_cache_valid
1464 assert d1["run"] 1465 assert d2["run"] != consider_cache_valid 1466
1480 d1, d2 = {"run": False}, {"run": False}
1481 assert f(2, d1, delay=0) == 4
1482 assert f(2, d2, delay=0) == 4
1481 assert f(2, d1, delay=0) == 4 1482 assert f(2, d2, delay=0) == 4 1483 assert d1["run"]
1482 assert f(2, d2, delay=0) == 4 1483 assert d1["run"] 1484 assert d2["run"]
1483 assert d1["run"] 1484 assert d2["run"] 1485
1487 d1, d2 = {"run": False}, {"run": False}
1488 assert f(2, d1, delay=0.2) == 4
1489 assert f(2, d2, delay=0.2) == 4
1488 assert f(2, d1, delay=0.2) == 4 1489 assert f(2, d2, delay=0.2) == 4 1490 assert d1["run"]
1489 assert f(2, d2, delay=0.2) == 4 1490 assert d1["run"] 1491 assert not d2["run"]
1490 assert d1["run"] 1491 assert not d2["run"] 1492
1500 d1, d2, d3 = {"run": False}, {"run": False}, {"run": False}
1501 assert f(2, d1) == 4
1502 assert f(2, d2) == 4
1501 assert f(2, d1) == 4 1502 assert f(2, d2) == 4 1503 time.sleep(0.5)
1503 time.sleep(0.5) 1504 assert f(2, d3) == 4 1505
1505 1506 assert d1["run"] 1507 assert not d2["run"]
1506 assert d1["run"] 1507 assert not d2["run"] 1508 assert d3["run"]
1507 assert not d2["run"] 1508 assert d3["run"] 1509
1524 counter = {}
1525 assert f(2, counter) == 1
1526 assert f(2, counter) == 1
1525 assert f(2, counter) == 1 1526 assert f(2, counter) == 1 1527
1528 x, meta = f.call(2, counter) 1529 assert x == 2, "f has not been called properly" 1530 assert isinstance(meta, dict), (
1529 assert x == 2, "f has not been called properly" 1530 assert isinstance(meta, dict), ( 1531 "Metadata are not returned by MemorizedFunc.call." 1532 ) 1533
1539 counter = {}
1540 assert f(2, counter) == 1
1541 assert f(2, counter) == 2
1540 assert f(2, counter) == 1 1541 assert f(2, counter) == 2 1542
1543 x, meta = f.call(2, counter) 1544 assert x == 3, "f has not been called properly" 1545 assert isinstance(meta, dict), (
1544 assert x == 3, "f has not been called properly" 1545 assert isinstance(meta, dict), ( 1546 "Metadata are not returned by MemorizedFunc.call." 1547 ) 1548
1573 with open(path_to_gitignore_file) as f: 1574 assert gitignore_file_content == f.read() 1575
26 value = await func(i) 27 assert value == i 28 assert len(accumulator) == i + 1
27 assert value == i 28 assert len(accumulator) == i + 1 29
62 63 assert len(accumulator) == current_accumulator + 1 64 # Also, check that Memory.eval works similarly
65 evaled = await memory.eval(f, 1) 66 assert evaled == out 67 assert len(accumulator) == current_accumulator + 1
66 assert evaled == out 67 assert len(accumulator) == current_accumulator + 1 68
89 await gg(1) 90 assert len(accumulator) == current_accumulator + 1 91
107 108 assert isinstance(c, np.memmap) 109 assert c.mode == "r"
108 assert isinstance(c, np.memmap) 109 assert c.mode == "r" 110
110 111 assert isinstance(b, np.memmap) 112 assert b.mode == "r"
111 assert isinstance(b, np.memmap) 112 assert b.mode == "r" 113
124 d = await twice(a) 125 assert len(recorded_warnings) == 1 126 exception_msg = "Exception while loading results"
126 exception_msg = "Exception while loading results" 127 assert exception_msg in recorded_warnings[0] 128 # Asserts that the recomputation returns a mmap
128 # Asserts that the recomputation returns a mmap 129 assert isinstance(d, np.memmap) 130 assert d.mode == "r"
129 assert isinstance(d, np.memmap) 130 assert d.mode == "r" 131
155 result = await func.call_and_shelve(2) 156 assert isinstance(result, Result) 157 assert result.get() == 5
156 assert isinstance(result, Result) 157 assert result.get() == 5 158
174 counter = {}
175 assert await gg(2, counter) == 1
176 assert await gg(2, counter) == 1
175 assert await gg(2, counter) == 1 176 assert await gg(2, counter) == 1 177
178 x, meta = await gg.call(2, counter) 179 assert x == 2, "f has not been called properly" 180 assert isinstance(meta, dict), "Metadata are not returned by MemorizedFunc.call."
179 assert x == 2, "f has not been called properly" 180 assert isinstance(meta, dict), "Metadata are not returned by MemorizedFunc.call."
6 import os 7 import subprocess 8 import sys
26 env["PYTHONPATH"] = ":".join([str(tmp_path)] + sys.path)
27 subprocess.check_call(
28 [
29 sys.executable,
30 "-c",
31 "import joblib, math; "
32 "joblib.Parallel(n_jobs=1)("
33 "joblib.delayed(math.sqrt)(i**2) for i in range(10))",
34 ],
35 env=env,
36 )
8 def test_version(): 9 assert hasattr(joblib, "__version__"), ( 10 "There are no __version__ argument on the joblib module" 11 ) 12
8 import os 9 import pickle 10 import random
121 if member == copy.deepcopy(member): 122 assert member == _member 123
150 # All is cached in one file 151 assert len(filenames) == 1 152 # Check that only one file was created
152 # Check that only one file was created 153 assert filenames[0] == filename 154 # Check that this file does exist
154 # Check that this file does exist 155 assert os.path.exists(filenames[0]) 156
160 for item in obj_: 161 assert isinstance(item, np.ndarray) 162 # And finally, check that all the values are equal.
168 # All is cached in one file 169 assert len(filenames) == 1 170
173 # We don't reconstruct memmaps 174 assert isinstance(obj_, type(obj)) 175
181 # All is cached in one file 182 assert len(filenames) == 1 183
184 obj_loaded = numpy_pickle.load(filename) 185 assert isinstance(obj_loaded, type(obj)) 186 np.testing.assert_array_equal(obj_loaded.array_float, obj.array_float)
208 209 assert isinstance(b, np.memmap) 210
215 obj_loaded = numpy_pickle.load(filename, mmap_mode="r") 216 assert isinstance(obj_loaded, type(obj)) 217 assert isinstance(obj_loaded.array_float, np.memmap)
216 assert isinstance(obj_loaded, type(obj)) 217 assert isinstance(obj_loaded.array_float, np.memmap) 218 assert not obj_loaded.array_float.flags.writeable
217 assert isinstance(obj_loaded.array_float, np.memmap) 218 assert not obj_loaded.array_float.flags.writeable 219 assert isinstance(obj_loaded.array_int, np.memmap)
218 assert not obj_loaded.array_float.flags.writeable 219 assert isinstance(obj_loaded.array_int, np.memmap) 220 assert not obj_loaded.array_int.flags.writeable
219 assert isinstance(obj_loaded.array_int, np.memmap) 220 assert not obj_loaded.array_int.flags.writeable 221 # Memory map not allowed for numpy object arrays
221 # Memory map not allowed for numpy object arrays 222 assert not isinstance(obj_loaded.array_obj, np.memmap) 223 np.testing.assert_array_equal(obj_loaded.array_float, obj.array_float)
228 obj_loaded = numpy_pickle.load(filename, mmap_mode="r+") 229 assert obj_loaded.array_float.flags.writeable 230 obj_loaded.array_float[0:10] = 10.0
230 obj_loaded.array_float[0:10] = 10.0 231 assert obj_loaded.array_int.flags.writeable 232 obj_loaded.array_int[0:10] = 10
239 numpy_pickle.load(filename, mmap_mode="w+") 240 assert obj_loaded.array_int.flags.writeable 241 assert obj_loaded.array_int.mode == "r+"
240 assert obj_loaded.array_int.flags.writeable 241 assert obj_loaded.array_int.mode == "r+" 242 assert obj_loaded.array_float.flags.writeable
241 assert obj_loaded.array_int.mode == "r+" 242 assert obj_loaded.array_float.flags.writeable 243 assert obj_loaded.array_float.mode == "r+"
242 assert obj_loaded.array_float.flags.writeable 243 assert obj_loaded.array_float.mode == "r+" 244
258 # the floating point array has been memory mapped 259 assert isinstance(a_clone, np.memmap) 260
261 # the object-dtype array has been loaded in memory 262 assert not isinstance(b_clone, np.memmap) 263
274 b = numpy_pickle.load(filename, mmap_mode="r") 275 assert isinstance(b, np.ma.masked_array) 276
288 warninfo = [w.message for w in warninfo] 289 assert not isinstance(reloaded_obj, np.memmap) 290 np.testing.assert_array_equal(obj, reloaded_obj)
290 np.testing.assert_array_equal(obj, reloaded_obj) 291 assert len(warninfo) == 1, debug_msg 292 assert (
291 assert len(warninfo) == 1, debug_msg
292 assert (
293 str(warninfo[0]) == 'mmap_mode "r+" is not compatible with compressed '
294 f'file {this_filename}. "r+" flag will be ignored.'
295 )
296
314 write_buf_size = _IO_BUFFER_SIZE + 16 * 1024**2 / 1e6 315 assert mem_used <= write_buf_size 316
320 read_buf_size = 32 + _IO_BUFFER_SIZE # MiB 321 assert mem_used < size + read_buf_size 322
338 dumped_filenames = numpy_pickle.dump(expected_list, fname, compress=1) 339 assert len(dumped_filenames) == 1 340 result_list = numpy_pickle.load(fname)
343 expected = _ensure_native_byte_order(expected) 344 assert result.dtype == expected.dtype 345 np.testing.assert_equal(result, expected)
346 else: 347 assert result == expected 348
366 367 assert le_array_native_load.dtype == be_array_native_load.dtype 368 assert le_array_native_load.dtype in all_dtypes
367 assert le_array_native_load.dtype == be_array_native_load.dtype 368 assert le_array_native_load.dtype in all_dtypes 369
373 374 assert le_array_nonnative_load.dtype == le_array.dtype 375 assert be_array_nonnative_load.dtype == be_array.dtype
374 assert le_array_nonnative_load.dtype == le_array.dtype 375 assert be_array_nonnative_load.dtype == be_array.dtype 376
421 )
422 assert len(warninfo) == expected_nb_warnings, (
423 "Did not get the expected number of warnings. Expected "
424 f"{expected_nb_warnings} but got warnings: "
425 f"{[w.message for w in warninfo]}"
426 )
427
432 for w in deprecation_warnings:
433 assert (
434 str(w.message)
435 == "The file '{0}' has been generated with a joblib "
436 "version less than 0.10. Please regenerate this "
437 "pickle file.".format(filename)
438 )
439
441 escaped_filename = re.escape(filename)
442 assert re.search(
443 f"memmapped.+{escaped_filename}.+segmentation fault", str(w.message)
444 )
445
448 expected = _ensure_native_byte_order(expected) 449 assert result.dtype == expected.dtype 450 np.testing.assert_equal(result, expected)
451 else: 452 assert result == expected 453 except Exception as exc:
456 if py_version_used_for_writing == 2: 457 assert isinstance(exc, ValueError) 458 message = (
461 )
462 assert message in str(exc)
463 elif filename.endswith(".lz4") and with_lz4.args[0]:
463 elif filename.endswith(".lz4") and with_lz4.args[0]:
464 assert isinstance(exc, ValueError)
465 assert LZ4_NOT_INSTALLED_ERROR in str(exc)
464 assert isinstance(exc, ValueError) 465 assert LZ4_NOT_INSTALLED_ERROR in str(exc) 466 else:
477 message = "unsupported pickle protocol: {0}".format(pickle_writing_protocol)
478 assert message in str(e.args)
479
554 if sys.byteorder == "big": 555 assert not _is_numpy_array_byte_order_mismatch(array) 556 else:
556 else: 557 assert _is_numpy_array_byte_order_mismatch(array) 558 converted = _ensure_native_byte_order(array)
562 else: 563 assert converted.dtype.byteorder == "=" 564
574 if sys.byteorder == "little": 575 assert not _is_numpy_array_byte_order_mismatch(array) 576 else:
576 else: 577 assert _is_numpy_array_byte_order_mismatch(array) 578 converted = _ensure_native_byte_order(array)
582 else: 583 assert converted.dtype.byteorder == "=" 584
592 with open(filename, "rb") as f: 593 assert _detect_compressor(f) == compress_tuple[0] 594
627 with open(filename, "rb") as f: 628 assert _detect_compressor(f) == compress_string 629
659 with open(dump_filename, "rb") as f: 660 assert _detect_compressor(f) == cmethod 661 # Verify the reloaded object is correct
662 obj_reloaded = numpy_pickle.load(dump_filename) 663 assert isinstance(obj_reloaded, type(obj)) 664 if isinstance(obj, np.ndarray):
666 else: 667 assert obj_reloaded == obj 668
706 obj_reloaded = numpy_pickle.load(filename_raw) 707 assert obj == obj_reloaded 708
734 with open(dump_fname, "rb") as f: 735 assert _detect_compressor(f) == cmethod 736 # Verify the reloaded object is correct
737 obj_reloaded = numpy_pickle.load(dump_fname) 738 assert isinstance(obj_reloaded, type(obj)) 739 assert obj_reloaded == obj
738 assert isinstance(obj_reloaded, type(obj)) 739 assert obj_reloaded == obj 740
768 else: 769 assert obj_reloaded == obj 770 assert obj_reloaded_2 == obj
769 assert obj_reloaded == obj 770 assert obj_reloaded_2 == obj 771
782 else: 783 assert obj_reloaded == obj 784
810 numpy_pickle.load(f, mmap_mode="r+") 811 assert len(warninfo) == 1 812 assert (
811 assert len(warninfo) == 1
812 assert (
813 str(warninfo[0].message)
814 == '"%(fileobj)r" is not a raw file, mmap_mode "%(mmap_mode)s" '
815 "flag will be ignored." % {"fileobj": f, "mmap_mode": "r+"}
816 )
817
827 numpy_pickle.load(buf, mmap_mode="r+") 828 assert len(warninfo) == 1 829 assert (
828 assert len(warninfo) == 1
829 assert (
830 str(warninfo[0].message)
831 == "In memory persistence is not compatible with mmap_mode "
832 '"%(mmap_mode)s" flag passed. mmap_mode option will be '
833 "ignored." % {"mmap_mode": "r+"}
834 )
835
851 with BinaryZlibFile(f, "wb", compresslevel=compress_level) as fz: 852 assert fz.writable() 853 fz.write(data)
853 fz.write(data) 854 assert fz.fileno() == f.fileno() 855 with raises(io.UnsupportedOperation):
859 fz._check_can_seek() 860 assert fz.closed 861 with raises(ValueError):
865 with BinaryZlibFile(f) as fz: 866 assert fz.readable() 867 assert fz.seekable()
866 assert fz.readable() 867 assert fz.seekable() 868 assert fz.fileno() == f.fileno()
867 assert fz.seekable() 868 assert fz.fileno() == f.fileno() 869 assert fz.read() == data
868 assert fz.fileno() == f.fileno() 869 assert fz.read() == data 870 with raises(io.UnsupportedOperation):
871 fz._check_can_write() 872 assert fz.seekable() 873 fz.seek(0)
873 fz.seek(0) 874 assert fz.tell() == 0 875 assert fz.closed
874 assert fz.tell() == 0 875 assert fz.closed 876
878 with BinaryZlibFile(filename, "wb", compresslevel=compress_level) as fz: 879 assert fz.writable() 880 fz.write(data)
882 with BinaryZlibFile(filename, "rb") as fz: 883 assert fz.read() == data 884 assert fz.seekable()
883 assert fz.read() == data 884 assert fz.seekable() 885
887 fz = BinaryZlibFile(filename, "wb", compresslevel=compress_level) 888 assert fz.writable() 889 fz.write(data)
892 fz = BinaryZlibFile(filename, "rb") 893 assert fz.read() == data 894 fz.close()
841 # More bytes
842 10000 * "{}".format(random.randint(0, 1000) * 1000).encode("latin-1"),
843 ],
951 c = numpy_pickle.load(filename) 952 assert isinstance(c, SubArray) 953 np.testing.assert_array_equal(c, a)
959 numpy_pickle.dump(value, Path(filename)) 960 assert numpy_pickle.load(filename) == value 961 numpy_pickle.dump(value, filename)
961 numpy_pickle.dump(value, filename) 962 assert numpy_pickle.load(Path(filename)) == value 963
975 ]: 976 assert not array.flags.c_contiguous 977 assert not array.flags.f_contiguous
976 assert not array.flags.c_contiguous 977 assert not array.flags.f_contiguous 978 numpy_pickle.dump(array, filename)
1040 memmaps = numpy_pickle.load(fname, mmap_mode="r") 1041 assert isinstance(memmaps[1], np.memmap) 1042 assert memmaps[1].offset > size
1041 assert isinstance(memmaps[1], np.memmap) 1042 assert memmaps[1].offset > size 1043 np.testing.assert_array_equal(obj, memmaps)
1061 1062 assert _COMPRESSORS[compressor_name].fileobj_factory == BinaryCompressorTestFile 1063 assert _COMPRESSORS[compressor_name].prefix == compressor_prefix
1062 assert _COMPRESSORS[compressor_name].fileobj_factory == BinaryCompressorTestFile 1063 assert _COMPRESSORS[compressor_name].prefix == compressor_prefix 1064
1119 1120 assert compressor_name in _COMPRESSORS 1121 assert _COMPRESSORS[compressor_name].fileobj_factory == gzip.GzipFile
1120 assert compressor_name in _COMPRESSORS 1121 assert _COMPRESSORS[compressor_name].fileobj_factory == gzip.GzipFile 1122
1133 compressor = "lz4" 1134 assert compressor in _COMPRESSORS 1135 assert _COMPRESSORS[compressor].fileobj_factory == lz4.frame.LZ4FrameFile
1134 assert compressor in _COMPRESSORS 1135 assert _COMPRESSORS[compressor].fileobj_factory == lz4.frame.LZ4FrameFile 1136
1141 with open(fname, "rb") as f: 1142 assert f.read(len(_LZ4_PREFIX)) == _LZ4_PREFIX 1143 assert numpy_pickle.load(fname) == data
1142 assert f.read(len(_LZ4_PREFIX)) == _LZ4_PREFIX 1143 assert numpy_pickle.load(fname) == data 1144
1147 with open(fname, "rb") as f: 1148 assert f.read(len(_LZ4_PREFIX)) == _LZ4_PREFIX 1149 assert numpy_pickle.load(fname) == data
1148 assert f.read(len(_LZ4_PREFIX)) == _LZ4_PREFIX 1149 assert numpy_pickle.load(fname) == data 1150
1180 memmap = numpy_pickle.load(fname, mmap_mode="r") 1181 assert isinstance(memmap, np.memmap) 1182 np.testing.assert_array_equal(a, memmap)
1182 np.testing.assert_array_equal(a, memmap) 1183 assert memmap.ctypes.data % numpy_pickle.NUMPY_ARRAY_ALIGNMENT_BYTES == 0 1184 assert memmap.flags.aligned
1183 assert memmap.ctypes.data % numpy_pickle.NUMPY_ARRAY_ALIGNMENT_BYTES == 0 1184 assert memmap.flags.aligned 1185
1198 for idx, memmap in enumerate(l_reloaded): 1199 assert isinstance(memmap, np.memmap) 1200 np.testing.assert_array_equal(array_list[idx], memmap)
1200 np.testing.assert_array_equal(array_list[idx], memmap) 1201 assert memmap.ctypes.data % numpy_pickle.NUMPY_ARRAY_ALIGNMENT_BYTES == 0 1202 assert memmap.flags.aligned
1201 assert memmap.ctypes.data % numpy_pickle.NUMPY_ARRAY_ALIGNMENT_BYTES == 0 1202 assert memmap.flags.aligned 1203
1221 for key, memmap in d_reloaded.items(): 1222 assert isinstance(memmap, np.memmap) 1223 np.testing.assert_array_equal(array_dict[key], memmap)
1223 np.testing.assert_array_equal(array_dict[key], memmap) 1224 assert memmap.ctypes.data % numpy_pickle.NUMPY_ARRAY_ALIGNMENT_BYTES == 0 1225 assert memmap.flags.aligned
1224 assert memmap.ctypes.data % numpy_pickle.NUMPY_ARRAY_ALIGNMENT_BYTES == 0 1225 assert memmap.flags.aligned
15 data_read = numpy_pickle_compat.read_zfile(f) 16 assert data == data_read
19 from multiprocessing import TimeoutError 20 from pickle import PicklingError 21 from time import sleep
154 def test_cpu_count(): 155 assert cpu_count() > 0 156
158 def test_effective_n_jobs(): 159 assert effective_n_jobs() > 0 160
174 # in the backend 175 assert effective_n_jobs(n_jobs=None) == expected_n_jobs 176 # without any backend, None will default to a single job
176 # without any backend, None will default to a single job 177 assert effective_n_jobs(n_jobs=None) == 1 178
187 def test_simple_parallel(backend, n_jobs, verbose): 188 assert [square(x) for x in range(5)] == Parallel( 189 n_jobs=n_jobs, backend=backend, verbose=verbose 190 )(delayed(square)(x) for x in range(5)) 191
211 lens.add(b - a) 212 assert len(lens) == 1 213
230 ) 231 assert results == [0, 1, 4] 232
254 # positive because of the name change. 255 assert len(warninfo) == 0 256
277 else: 278 assert not warninfo 279 return True
313 if parent_backend == "threading": 314 assert any(res) 315 else:
315 else: 316 assert all(res) 317
332 t.join() 333 assert is_run_parallel[0] 334
362 Parallel(n_jobs=2, backend="threading")(delayed(q.put)(1) for _ in range(5)) 363 assert q.full() 364
369 lst = range(10) 370 assert [f(x, y=1) for x in lst] == Parallel(n_jobs=n_jobs)( 371 delayed(f)(x, y=1) for x in lst 372 ) 373
386 # the managed block: 387 assert expected == p(delayed(f)(x, y=1) for x in lst) 388 assert expected == p(delayed(f)(x, y=1) for x in lst)
387 assert expected == p(delayed(f)(x, y=1) for x in lst) 388 assert expected == p(delayed(f)(x, y=1) for x in lst) 389
391 if mp is not None: 392 assert get_workers(managed_backend) is get_workers(p._backend) 393
396 if mp is not None: 397 assert get_workers(p._backend) is None 398
399 # It's still possible to use the parallel instance in non-managed mode: 400 assert expected == p(delayed(f)(x, y=1) for x in lst) 401 if mp is not None:
401 if mp is not None: 402 assert get_workers(p._backend) is None 403
437 for x_returned, byteorder_in_worker in result: 438 assert byteorder_in_worker == initial_np_byteorder 439 assert byteorder_in_worker == x_returned.dtype.byteorder
438 assert byteorder_in_worker == initial_np_byteorder 439 assert byteorder_in_worker == x_returned.dtype.byteorder 440 np.testing.assert_array_equal(x, x_returned)
445 # Check that timeout isn't thrown when function is fast enough 446 assert ( 447 len( 448 Parallel(n_jobs=2, backend=backend, timeout=30)( 449 delayed(sleep)(0.001) for x in range(10) 450 ) 451 ) 452 == 10 453 ) 454
504 with Parallel(n_jobs=2, backend=backend) as parallel: 505 assert get_workers(parallel._backend) is not None 506 original_workers = get_workers(parallel._backend)
512 # state despite the previously raised (and caught) exception 513 assert get_workers(parallel._backend) is not None 514
515 # The pool should have been interrupted and restarted: 516 assert get_workers(parallel._backend) is not original_workers 517
517 518 assert [f(x, y=1) for x in range(10)] == parallel( 519 delayed(f)(x, y=1) for x in range(10) 520 ) 521
526 # The pool should still be available despite the exception 527 assert get_workers(parallel._backend) is not None 528
529 # The pool should have been interrupted and restarted: 530 assert get_workers(parallel._backend) is not original_workers 531
531 532 assert [f(x, y=1) for x in range(10)] == parallel( 533 delayed(f)(x, y=1) for x in range(10) 534 ), ( 535 parallel._iterating, 536 parallel.n_completed_tasks, 537 parallel.n_dispatched_tasks, 538 parallel._aborting, 539 ) 540
542 # context manager 543 assert get_workers(parallel._backend) is None 544 else:
639 ) 640 assert queue == expected_queue 641 assert len(queue) == 12
640 assert queue == expected_queue 641 assert len(queue) == 12 642
662 queue_contents = list(queue) 663 assert queue_contents[0] == "Produced 0" 664
667 first_consumption_index = queue_contents[:4].index("Consumed any")
668 assert first_consumption_index > -1
669
670 produced_3_index = queue_contents.index("Produced 3") # 4th task produced
671 assert produced_3_index > first_consumption_index
672
672 673 assert len(queue) == 12 674
682 p(delayed(id)(i) for i in range(5000)) # many very fast tasks 683 assert p._backend.compute_batch_size() == 1 684
694 # it's a strictly positive number. 695 assert p._backend.compute_batch_size() > 0 696
731 report = "".join(report_lines) 732 assert "nested_function_outer" in report 733 assert "nested_function_inner" in report
732 assert "nested_function_outer" in report 733 assert "nested_function_inner" in report 734 assert "exception_raiser" in report
733 assert "nested_function_inner" in report 734 assert "exception_raiser" in report 735
735 736 assert type(excinfo.value) is ValueError 737
769 Parallel(n_jobs=0, backend=backend)._initialize_backend() 770 assert "n_jobs == 0 in Parallel has no meaning" in str(excinfo.value) 771
773 Parallel(n_jobs=0.5, backend=backend)._initialize_backend() 774 assert "n_jobs == 0 in Parallel has no meaning" in str(excinfo.value) 775
777 Parallel(n_jobs="2.3", backend=backend)._initialize_backend() 778 assert "n_jobs could not be converted to int" in str(excinfo.value) 779
781 Parallel(n_jobs="invalid_str", backend=backend)._initialize_backend() 782 assert "n_jobs could not be converted to int" in str(excinfo.value) 783
789 p = Parallel(n_jobs=n_jobs, backend=backend) 790 assert p._effective_n_jobs() == 2 791
792 res = p(delayed(square)(i) for i in range(10)) 793 assert all(r == square(i) for i, r in enumerate(res)) 794
798 register_parallel_backend("test_backend", FakeParallelBackend)
799 assert "test_backend" in BACKENDS
800 assert BACKENDS["test_backend"] == FakeParallelBackend
799 assert "test_backend" in BACKENDS 800 assert BACKENDS["test_backend"] == FakeParallelBackend 801 finally:
806 default_backend_orig = parallel.DEFAULT_BACKEND 807 assert _active_backend_type() == get_default_backend_instance() 808 try:
809 register_parallel_backend("threading", BACKENDS["threading"], make_default=True)
810 assert _active_backend_type() == ThreadingBackend
811 finally:
813 parallel.DEFAULT_BACKEND = default_backend_orig 814 assert _active_backend_type() == get_default_backend_instance() 815
829 active_backend, active_n_jobs = parallel.get_active_backend() 830 assert active_n_jobs == 3 831 assert effective_n_jobs(3) == 3
830 assert active_n_jobs == 3 831 assert effective_n_jobs(3) == 3 832 p = Parallel()
832 p = Parallel() 833 assert p.n_jobs == 3 834 if backend_name == "multiprocessing":
834 if backend_name == "multiprocessing": 835 assert type(active_backend) is MultiprocessingBackend 836 assert type(p._backend) is MultiprocessingBackend
835 assert type(active_backend) is MultiprocessingBackend 836 assert type(p._backend) is MultiprocessingBackend 837 elif backend_name == "loky":
837 elif backend_name == "loky": 838 assert type(active_backend) is LokyBackend 839 assert type(p._backend) is LokyBackend
838 assert type(active_backend) is LokyBackend 839 assert type(p._backend) is LokyBackend 840 elif backend_name == "threading":
840 elif backend_name == "threading": 841 assert type(active_backend) is ThreadingBackend 842 assert type(p._backend) is ThreadingBackend
841 assert type(active_backend) is ThreadingBackend
842 assert type(p._backend) is ThreadingBackend
843 elif backend_name.startswith("test_"):
843 elif backend_name.startswith("test_"):
844 assert type(active_backend) is FakeParallelBackend
845 assert type(p._backend) is FakeParallelBackend
844 assert type(active_backend) is FakeParallelBackend 845 assert type(p._backend) is FakeParallelBackend 846
858 859 assert _active_backend_type() == get_default_backend_instance() 860 # check that this possible to switch parallel backends sequentially
863 # The default backend is restored 864 assert _active_backend_type() == get_default_backend_instance() 865
873 # The default backend is again restored 874 assert _active_backend_type() == get_default_backend_instance() 875
888 monkeypatch.setitem(BACKENDS, "param_backend", ParameterizedParallelBackend) 889 assert _active_backend_type() == get_default_backend_instance() 890
892 active_backend, active_n_jobs = parallel.get_active_backend() 893 assert type(active_backend) is ParameterizedParallelBackend 894 assert active_backend.param == 42
893 assert type(active_backend) is ParameterizedParallelBackend 894 assert active_backend.param == 42 895 assert active_n_jobs == 3
894 assert active_backend.param == 42 895 assert active_n_jobs == 3 896 p = Parallel()
896 p = Parallel() 897 assert p.n_jobs == 3 898 assert p._backend is active_backend
897 assert p.n_jobs == 3 898 assert p._backend is active_backend 899 results = p(delayed(sqrt)(i) for i in range(5))
899 results = p(delayed(sqrt)(i) for i in range(5)) 900 assert results == [sqrt(i) for i in range(5)] 901
902 # The default backend is again restored 903 assert _active_backend_type() == get_default_backend_instance() 904
907 def test_directly_parameterized_backend_context_manager(context): 908 assert _active_backend_type() == get_default_backend_instance() 909
913 active_backend, active_n_jobs = parallel.get_active_backend() 914 assert type(active_backend) is ParameterizedParallelBackend 915 assert active_backend.param == 43
914 assert type(active_backend) is ParameterizedParallelBackend 915 assert active_backend.param == 43 916 assert active_n_jobs == 5
915 assert active_backend.param == 43 916 assert active_n_jobs == 5 917 p = Parallel()
917 p = Parallel() 918 assert p.n_jobs == 5 919 assert p._backend is active_backend
918 assert p.n_jobs == 5 919 assert p._backend is active_backend 920 results = p(delayed(sqrt)(i) for i in range(5))
920 results = p(delayed(sqrt)(i) for i in range(5)) 921 assert results == [sqrt(i) for i in range(5)] 922
923 # The default backend is again restored 924 assert _active_backend_type() == get_default_backend_instance() 925
932 def get_nested_pids(): 933 assert _active_backend_type() == ThreadingBackend 934 # Assert that the nested backend does not change the default number of
935 # jobs used in Parallel 936 assert Parallel()._effective_n_jobs() == 1 937
964 for pid_group in pid_groups: 965 assert len(set(pid_group)) == 1 966
978 # backend for nested calls. 979 assert _active_backend_type() == BACKENDS[expected_backend_type] 980
983 expected_n_job = effective_n_jobs(expected_n_job) 984 assert Parallel()._effective_n_jobs() == expected_n_job 985
1001 backend = ctx[0] 1002 assert backend.nesting_level == expected_level 1003
1045 if context is parallel_config: 1046 assert ctx["backend"].i == 1 1047 if context is parallel_backend:
1047 if context is parallel_backend: 1048 assert ctx[0].i == 1 1049
1083 results = Parallel(**params)(delayed(square)(i) for i in range(n_tasks)) 1084 assert results == expected 1085
1092 start_method = context.get_start_method() 1093 assert start_method == mp_start_method 1094
1322 exhausted_iterator = iter([]) 1323 assert Parallel(n_jobs=2)(exhausted_iterator) == [] 1324
1374 (result,) = Parallel(n_jobs=2)(delayed(identity)(memmap) for _ in [0]) 1375 assert isinstance(memmap[1], np.memmap) 1376 assert memmap[1].offset > size
1375 assert isinstance(memmap[1], np.memmap) 1376 assert memmap[1].offset > size 1377 np.testing.assert_array_equal(obj, result)
1382 Parallel(n_jobs=1, timeout=1)(delayed(square)(i) for i in range(50)) 1383 assert len(warninfo) == 1 1384 w = warninfo[0]
1384 w = warninfo[0] 1385 assert isinstance(w.message, UserWarning) 1386 assert str(w.message) == (
1385 assert isinstance(w.message, UserWarning) 1386 assert str(w.message) == ( 1387 "The backend class 'SequentialBackend' does not support timeout. " 1388 "You have set 'timeout=1' in Parallel but the 'timeout' parameter " 1389 "will not be used." 1390 ) 1391
1410 1411 assert all(v == r for v, r in zip(input_list, result)) 1412
1435 1436 assert all(v == r for v, r in zip(expected_quickly_returned, quickly_returned)) 1437
1458 dt = time.time() - t_start 1459 assert dt < 20 1460
1503 # Make sure that the error is raised quickly 1504 assert time.time() - t_start < 2, ( 1505 "The error should be raised immediately when submitting a new task " 1506 "but it took more than 2s." 1507 ) 1508
1525 # Make sure that the error is raised quickly 1526 assert time.time() - t_start < 2, ( 1527 "The error should be raised immediately when submitting a new task " 1528 "but it took more than 2s." 1529 ) 1530
1552 1553 assert all(res == i for res, i in zip(g, range(10))) 1554 assert all(res == i for res, i in zip(g2, range(10, 20)))
1553 assert all(res == i for res, i in zip(g, range(10))) 1554 assert all(res == i for res, i in zip(g2, range(10, 20))) 1555
1593 1594 assert all(res == i for res, i in zip(g, range(10, 20))) 1595
1595 1596 assert time.time() - t_start < 5 1597
1602 time.sleep(0.5) 1603 assert time.time() - t_start < 5 1604
1607 # state might be delayed). 1608 assert parallel._aborting 1609
1624 # The memmap folder should not be clean in the context scope 1625 assert len(os.listdir(tmpdir)) > 0 1626
1655 ) 1656 assert results == [i**2 for i in range(10)] 1657
1668 p(delayed(time.sleep)(task_time) for i in range(n_inputs)) 1669 assert p._backend._effective_batch_size == p._backend._DEFAULT_EFFECTIVE_BATCH_SIZE 1670 assert (
1669 assert p._backend._effective_batch_size == p._backend._DEFAULT_EFFECTIVE_BATCH_SIZE 1670 assert ( 1671 p._backend._smoothed_batch_duration 1672 == p._backend._DEFAULT_SMOOTHED_BATCH_DURATION 1673 ) 1674
1675 p(delayed(time.sleep)(task_time) for i in range(n_inputs)) 1676 assert p._backend._effective_batch_size == p._backend._DEFAULT_EFFECTIVE_BATCH_SIZE 1677 assert (
1676 assert p._backend._effective_batch_size == p._backend._DEFAULT_EFFECTIVE_BATCH_SIZE 1677 assert ( 1678 p._backend._smoothed_batch_duration 1679 == p._backend._DEFAULT_SMOOTHED_BATCH_DURATION 1680 ) 1681
1686 for n_jobs in [1, 2, -1]: 1687 assert type(Parallel(n_jobs=n_jobs)._backend) is get_default_backend_instance() 1688
1689 p = Parallel(n_jobs=n_jobs, prefer="threads") 1690 assert type(p._backend) is ThreadingBackend 1691
1692 p = Parallel(n_jobs=n_jobs, prefer="processes") 1693 assert type(p._backend) is LokyBackend 1694
1695 p = Parallel(n_jobs=n_jobs, require="sharedmem") 1696 assert type(p._backend) is ThreadingBackend 1697
1700 p = Parallel(n_jobs=2, backend="loky", prefer="threads") 1701 assert type(p._backend) is LokyBackend 1702
1706 p = Parallel(prefer="threads") 1707 assert type(p._backend) is LokyBackend 1708 assert p.n_jobs == 2
1707 assert type(p._backend) is LokyBackend 1708 assert p.n_jobs == 2 1709
1712 p = Parallel(n_jobs=3, prefer="threads") 1713 assert type(p._backend) is LokyBackend 1714 assert p.n_jobs == 3
1713 assert type(p._backend) is LokyBackend 1714 assert p.n_jobs == 3 1715
1721 p = Parallel(require="sharedmem") 1722 assert type(p._backend) is ThreadingBackend 1723 assert p.n_jobs == 1
1722 assert type(p._backend) is ThreadingBackend 1723 assert p.n_jobs == 1 1724
1726 p = Parallel(n_jobs=3, require="sharedmem") 1727 assert type(p._backend) is ThreadingBackend 1728 assert p.n_jobs == 3
1727 assert type(p._backend) is ThreadingBackend 1728 assert p.n_jobs == 3 1729
1740 ) 1741 assert results == expected_results 1742
1744 results = Parallel()(delayed(square)(i) for i in range(10)) 1745 assert results == expected_results 1746
1763 p = Parallel(n_jobs=2, prefer="processes") # ignored 1764 assert type(p._backend) is MyCustomThreadingBackend 1765
1766 p = Parallel(n_jobs=2, require="sharedmem") 1767 assert type(p._backend) is MyCustomThreadingBackend 1768
1780 p = Parallel(n_jobs=2, prefer="processes") 1781 assert type(p._backend) is MyCustomProcessingBackend 1782
1783 out, err = capsys.readouterr() 1784 assert out == "" 1785 assert err == ""
1784 assert out == "" 1785 assert err == "" 1786
1787 p = Parallel(n_jobs=2, require="sharedmem", verbose=10) 1788 assert type(p._backend) is ThreadingBackend 1789
1795 ) 1796 assert out.strip() == expected 1797 assert err == ""
1796 assert out.strip() == expected 1797 assert err == "" 1798
1851 ] 1852 assert backend_types_and_levels == expected_types_and_levels 1853
1884 1885 assert isinstance(exc, RecursionError) 1886
1910 for worker_env_vars, omp_num_threads in results: 1911 assert omp_num_threads == expected_num_threads 1912 for name, value in worker_env_vars.items():
1913 if name.endswith("_THREADS"):
1914 assert value == str(expected_num_threads)
1915 else:
1915 else: 1916 assert name == "ENABLE_IPC" 1917 assert value == "1"
1916 assert name == "ENABLE_IPC" 1917 assert value == "1" 1918
1958 1959 assert check_globals() == "original value" 1960
1963 )
1964 assert set(workers_global_variable) == {"original value"}
1965
1968 MY_GLOBAL_VARIABLE = "changed value" 1969 assert check_globals() == "changed value" 1970
1973 )
1974 assert set(workers_global_variable) == {"changed value"}
1975
2013 expected = {min(num_threads, parent_max_num_threads), num_threads}
2014 assert child_module["num_threads"] in expected
2015
2099 results = Parallel(n_jobs=n_jobs)(delayed(_get_env)(var_name) for i in range(2)) 2100 assert results == ["4", "4"] 2101
2105 ) 2106 assert results == ["1", "1"] 2107
2133 executor = get_reusable_executor(reuse=True) 2134 assert executor == first_executor 2135
2143 state = status.get(pid, None)
2144 assert state in ("initialized", "started"), (
2145 f"worker should have been in initialized state, got {state}"
2146 )
2147 if not wait_workers:
2220 pids = pids.union(set(results)) 2221 assert len(pids) == n_jobs, ( 2222 "The workers should be reused when the initializer is the same" 2223 ) 2224
2247 pids = pids.union(set(results)) 2248 assert len(pids) == n_repetitions * n_jobs, ( 2249 "The workers should not be reused when the initializer arguments change" 2250 )
3 # test_concurrency_safe_write which pickles big python objects 4 import cPickle as cpickle 5 except ImportError:
5 except ImportError: 6 import pickle as cpickle 7 import functools
8 import time 9 from pickle import PicklingError 10
32 with open(filename, "rb") as f: 33 reloaded = cpickle.load(f) 34 break
41 raise 42 assert expected == reloaded 43
24 def test_eval_expr_valid(expr, result): 25 assert eval_expr(expr) == result
6 import re 7 import subprocess 8 import sys
45 """ 46 proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 47
264 self.__compile__() 265 assert self.__cache__ is not None 266 # Chain can be empty, if rules disabled. But we still have to return Array.
34 # Use reversed logic in links start/end match 35 assert tokens is not None 36 i = len(tokens)
38 i -= 1 39 assert isinstance(tokens, list) 40 currentToken = tokens[i]
19 # But basically, the index will not be negative. 20 assert index >= 0 21 return string[:index] + ch + string[index + 1 :]
99 else: 100 assert node.nester_tokens 101 token_list.append(node.nester_tokens.opening)
163 return self.token.type
164 assert self.nester_tokens
165 return self.nester_tokens.opening.type.removesuffix("_open")
318 def feed(self, next_bytes): 319 assert self._feeding 320 view = _get_data_from_buffer(next_bytes)
383 break 384 assert isinstance(read_data, bytes) 385 self._buffer += read_data
558 return self._ext_hook(n, bytes(obj)) 559 assert typ == TYPE_IMMEDIATE 560 return obj
753 data = obj.data 754 assert isinstance(code, int) 755 assert isinstance(data, bytes)
754 assert isinstance(code, int) 755 assert isinstance(data, bytes) 756 L = len(data)
60 except SystemExit as system_exit: 61 assert isinstance(system_exit.code, int) 62 exit_status = system_exit.code
48 return type 49 assert isinstance(tvar, TypeVarType) 50 values = tvar.values
104 tvars = callable.variables 105 assert len(orig_types) <= len(tvars) 106 # Check that inferred type variable values are compatible with allowed
111 for tvar, type in zip(tvars, orig_types): 112 assert not isinstance(type, PartialType), "Internal error: must never apply partial type" 113 if type is None:
130 callable = expand_type(callable, id_to_type) 131 assert isinstance(callable, CallableType) 132 return callable.copy_modified(
140 callable = expand_type(callable, id_to_type) 141 assert isinstance(callable, CallableType) 142 return callable.copy_modified(variables=[tv for tv in tvars if tv.id not in id_to_type])
171 typ = expand_type(tv, id_to_type) 172 assert isinstance(typ, TypeVarLikeType) 173 remaining_tvars.append(typ)
244 245 assert isinstance(result, ProperType) and isinstance(result, CallableType) 246 result.variables = result.variables + tuple(found_vars)
290 repl = new_args[param_spec_index] 291 assert isinstance(repl, ProperType) and isinstance(repl, Parameters) 292 repl.variables = list(repl.variables) + list(found_vars)
299 call = mypy.subtypes.find_member("__call__", t, t, is_operator=True)
300 assert call is not None
301 return call.accept(
78 elif actual_kind.is_named(): 79 assert actual_names is not None, "Internal error: named kinds without names given" 80 name = actual_names[ai]
85 else: 86 assert actual_kind == nodes.ARG_STAR2 87 actualt = get_proper_type(actual_arg_type(ai))
228 fallback = unpacked 229 assert ( 230 isinstance(fallback, Instance) 231 and fallback.type.fullname == "builtins.tuple" 232 ) 233 item = fallback.args[0]
247 # Lookup type based on keyword argument name. 248 assert formal_name is not None 249 else:
203 key = literal_hash(expr) 204 assert key is not None, "Internal error: binder tried to put non-literal" 205 if key not in self.declarations:
218 key = literal_hash(expr) 219 assert key is not None, "Internal error: binder tried to get non-literal" 220 found = self._get(key)
235 key = literal_hash(expr) 236 assert key is not None, "Internal error: binder tried cleanse non-literal" 237 self._cleanse_key(key)
286 current_type = resulting_values[0] 287 assert current_type is not None 288 type = current_type.type
298 for t in resulting_values: 299 assert t is not None 300 possible_types.append(t.type)
463 key = literal_hash(expr) 464 assert key is not None 465 for dep in self.dependencies.get(key, set()):
522 """ 523 assert len(self.frames) > 1 524
550 """ 551 assert len(self.frames) == 1 552 yield self.push_frame()
553 self.pop_frame(True, 0) 554 assert len(self.frames) == 1 555
599 unpack = last.items[unpack_index] 600 assert isinstance(unpack, UnpackType) 601 unpacked = get_proper_type(unpack.type)
603 return typ 604 assert unpacked.type.fullname == "builtins.tuple" 605 suffix = last.items[unpack_index + 1 :]
457 if plugin_dir is not None: 458 assert sys.path[0] == plugin_dir 459 del sys.path[0]
523 if hasattr(module, "__file__"): 524 assert module.__file__ is not None 525 with open(module.__file__, "rb") as f:
964 deps_json = DEPS_ROOT_FILE
965 assert deps_json
966 manager.log("Writing deps cache", deps_json)
1132 module_deps_metas = deps_meta["deps_meta"] 1133 assert isinstance(module_deps_metas, dict) 1134 if not manager.options.skip_cache_mtime_checks:
1188 else: 1189 assert isinstance(result, dict) 1190 return result
1405 bazel = manager.options.bazel 1406 assert path is not None, "Internal error: meta was provided without a path" 1407 if not manager.options.skip_cache_mtime_checks:
1640 options = manager.options.clone_for_module(id)
1641 assert source_hash is not None
1642 meta = {
1927 if not temporary: 1928 assert id or path or source is not None, "Neither id, path nor source given" 1929 self.manager = manager
1943 if not path and source is None: 1944 assert id is not None 1945 try:
1989 all_deps = self.dependencies + self.suppressed
1990 assert len(all_deps) == len(self.meta.dep_prios)
1991 self.priorities = {id: pri for id, pri in zip(all_deps, self.meta.dep_prios)}
1991 self.priorities = {id: pri for id, pri in zip(all_deps, self.meta.dep_prios)}
1992 assert len(all_deps) == len(self.meta.dep_lines)
1993 self.dep_line_map = {id: line for id, line in zip(all_deps, self.meta.dep_lines)}
2022 def xmeta(self) -> CacheMeta: 2023 assert self.meta, "missing meta on allegedly fresh module" 2024 return self.meta
2108 def load_tree(self, temporary: bool = False) -> None: 2109 assert ( 2110 self.meta is not None 2111 ), "Internal error: this method must be called only for cached modules" 2112
2136 def fix_cross_refs(self) -> None: 2137 assert self.tree is not None, "Internal error: method must be called on parsed file only" 2138 # We need to set allow_missing when doing a fine grained cache
2178 # tests have predictable output.) 2179 assert ioerr.errno is not None 2180 raise CompileError(
2198 else: 2199 assert source is not None 2200 self.source_hash = compute_hash(source)
2257 options = self.options 2258 assert self.tree is not None 2259
2308 manager = self.manager 2309 assert self.tree is not None 2310
2348 if not self._type_checker: 2349 assert self.tree is not None, "Internal error: must be called on parsed file only" 2350 manager = self.manager
2364 # point no temporary type maps can be active. 2365 assert len(self.type_checker()._type_maps) == 1 2366 return self.type_checker()._type_maps[0]
2377 def detect_possibly_undefined_vars(self) -> None: 2378 assert self.tree is not None, "Internal error: method must be called on parsed file only" 2379 if self.tree.is_stub:
2396 def finish_passes(self) -> None: 2397 assert self.tree is not None, "Internal error: method must be called on parsed file only" 2398 manager = self.manager
2453 def _patch_indirect_dependencies(self, module_refs: set[str], types: list[Type]) -> None: 2454 assert None not in types 2455 valid = self.valid_references()
2469 def compute_fine_grained_deps(self) -> dict[str, set[str]]:
2470 assert self.tree is not None
2471 if self.id in ("builtins", "typing", "types", "sys", "_typeshed"):
2497 def valid_references(self) -> set[str]: 2498 assert self.ancestors is not None 2499 valid_refs = set(self.dependencies + self.suppressed + self.ancestors)
2507 def write_cache(self) -> None: 2508 assert self.tree is not None, "Internal error: method must be called on parsed file only" 2509 # We don't support writing cache files in fine-grained incremental mode.
2533 dep_lines = self.dependency_lines() 2534 assert self.source_hash is not None 2535 assert len(set(self.dependencies)) == len(
2534 assert self.source_hash is not None
2535 assert len(set(self.dependencies)) == len(
2536 self.dependencies
2537 ), f"Duplicates in dependencies list for {self.id} ({self.dependencies})"
2538 new_interface_hash, self.meta = write_cache(
2563 manager = self.manager 2564 assert self.ancestors is not None 2565 if suppressed_only:
2831 if "{stub_dist}" in note:
2832 assert dist is not None
2833 note = note.format(stub_dist=dist)
2835 if reason is ModuleNotFoundReason.APPROVED_STUBS_NOT_INSTALLED: 2836 assert dist is not None 2837 manager.missing_stub_packages.add(dist)
2844 """Produce an error for an import ignored due to --follow_imports=error""" 2845 assert caller_state, (id, path) 2846 save_import_context = manager.errors.import_context()
3153 for st in new: 3154 assert st.ancestors is not None 3155 # Strip out indirect dependencies. These will be dealt with
3231 3232 assert newst.id not in graph, newst.id 3233 graph[newst.id] = newst
3490 typing_mod = graph["typing"].tree 3491 assert typing_mod, "The typing module was not parsed" 3492 mypy.semanal_main.semantic_analysis_for_scc(graph, scc, manager.errors)
3614 ref_info_file = ".".join(data_file.split(".")[:-2]) + ".refs.json"
3615 assert not ref_info_file.startswith(".")
3616
83 return read_float(data)
84 assert False, f"Unknown literal tag {tag}"
85
531 532 assert not self.current_node_deferred 533
536 all_node = all_.node 537 assert all_node is not None 538 seq_str = self.named_generic_type(
571 else: 572 assert not self.deferred_nodes 573 self.deferred_nodes = []
605 606 assert not self.current_node_deferred 607 # TODO: Handle __all__
726 # HACK: Infer the type of the property. 727 assert isinstance(defn.items[0], Decorator) 728 self.visit_decorator(defn.items[0])
755 for i, fdef in enumerate(defn.items): 756 assert isinstance(fdef, Decorator) 757 if defn.is_property:
757 if defn.is_property: 758 assert isinstance(defn.items[0], Decorator) 759 settable = defn.items[0].var.is_settable_property
778 for item in defn.items: 779 assert isinstance(item, Decorator) 780 item_type = self.extract_callable_type(item.var.type, item)
788 # code paths are getting property type this way. 789 assert isinstance(defn.items[0], Decorator) 790 var_type = self.extract_callable_type(defn.items[0].var.type, defn)
822 finally: 823 assert self.overload_impl_stack.pop() == impl 824
876 877 assert isinstance(outer_type, CallableType) 878 return outer_type
902 else: 903 assert False, "Impl isn't the right type" 904
910 for i, item in enumerate(defn.items): 911 assert isinstance(item, Decorator) 912 sig1 = self.extract_callable_type(item.var.type, item)
916 for j, item2 in enumerate(defn.items[i + 1 :]): 917 assert isinstance(item2, Decorator) 918 sig2 = self.extract_callable_type(item2.var.type, item2)
956 if impl_type is not None: 957 assert defn.impl is not None 958
1158 return AnyType(TypeOfAny.from_another_any, source_any=return_type) 1159 assert isinstance(return_type, Instance), "Should only be called on coroutine functions." 1160 # Note: return type is the 3rd type parameter of Coroutine.
1252 def check_func_def_override(self, defn: FuncDef, new_type: FunctionLike) -> None: 1253 assert defn.original_def is not None 1254 if isinstance(defn.original_def, FuncDef):
1838 1839 assert defn.info 1840
1870 ) 1871 assert len(reverse_type.arg_types) >= 2 1872
2268 # The caller should handle deferrals. 2269 assert typ is not None and original_type is not None 2270
2304 # the reasoning above.
2305 assert f"{defn.name}-redefinition" in defn.info.names
2306 return False
2466 else: 2467 assert False, "Need to check all FunctionLike subtypes here" 2468
2799 def check_enum(self, defn: ClassDef) -> None: 2800 assert defn.info.is_enum 2801 if defn.info.fullname not in ENUM_BASES and "__members__" in defn.info.names:
3017 else: 3018 assert first_type and second_type 3019 ok = is_subtype(first_type, second_type, ignore_pos_arg_names=True)
3101 lvalue_type = AnyType(TypeOfAny.special_form) 3102 assert isinstance(assign.rvalue, NameExpr) 3103 message = message_registry.INCOMPATIBLE_IMPORT_OF.format(assign.rvalue.name)
3480 typ = var.type 3481 assert isinstance(typ, PartialType) 3482 if typ.type is None:
3569 if base_type: 3570 assert base_node is not None 3571 if not self.check_compatibility_super(
3587 # the getter type is ready. 3588 assert base_type is not None 3589 if not is_subtype(base_type, lvalue_type):
3667 self_type = fill_typevars(current_class) 3668 assert self_type is not None, "Internal error: base lookup outside class" 3669 if isinstance(self_type, TupleType):
3785 lv = lvs[0] 3786 assert isinstance(lv, RefExpr) 3787 if lv.node is not None:
3787 if lv.node is not None: 3788 assert isinstance(lv.node, Var) 3789 if (
3899 else: 3900 assert ( 3901 isinstance(unpacked, Instance) 3902 and unpacked.type.fullname == "builtins.tuple" 3903 ) 3904 fallback = unpacked
4130 for type, declared_type in items: 4131 assert declared_type is not None 4132 clean_items.append((type, declared_type))
4207 return 4208 assert isinstance(reinferred_rvalue_type, TupleType) 4209 rvalue_type = reinferred_rvalue_type
4350 if isinstance(lvalue, NameExpr): 4351 assert isinstance(lvalue.node, Var) 4352 inferred = lvalue.node
4353 else: 4354 assert isinstance(lvalue, MemberExpr) 4355 self.expr_checker.accept(lvalue.expr)
4476 arg1 = erase_type(arg1) 4477 assert isinstance(arg1, Instance) 4478 partial_type = PartialType(init_type.type, name, arg1)
4549 var.is_settable_property = True 4550 assert isinstance(definition, Decorator), definition 4551 var.setter_type = definition.var.setter_type
5150 # that follows the try statement.) 5151 assert iter_errors is not None 5152 if not self.binder.is_unreachable():
5376 super_instance = map_instance_to_supertype(typ, supertype) 5377 assert len(super_instance.args) == 1 5378 return super_instance.args[0]
5718 # When matching a tuple expression with a sequence pattern, narrow individual tuple items 5719 assert len(expr.items) == len(typ_.items) 5720 for item_expr, item_typ in zip(expr.items, typ_.items):
5736 node = expr.node 5737 assert node is not None 5738 all_captures[node].append((expr, typ))
5766 # If it didn't exist before ``match``, it's a Var. 5767 assert isinstance(var, Var) 5768 inferred_types[var] = new_type
5832 curr_module = self.scope.stack[0] 5833 assert isinstance(curr_module, MypyFile) 5834
5863 if (symbol := curr_module_.names.get(name)) is not None: 5864 assert isinstance(symbol.node, TypeInfo) 5865 return symbol.node, name
5923 cur_module = self.scope.stack[0]
5924 assert isinstance(cur_module, MypyFile)
5925 gen_name = gen_unique_name(f"<callable subtype of {typ.type.name}>", cur_module.names)
6369 else: 6370 assert type_is is not None 6371 return conditional_types_to_typemaps(
6530 elif operator in {"in", "not in"}:
6531 assert len(expr_indices) == 2
6532 left_index, right_index = expr_indices
6759 try: 6760 assert str_literals is not None 6761 member_types = [new_parent_type.items[key] for key in str_literals]
6773 try: 6774 assert int_literals is not None 6775 member_types = [new_parent_type.items[key] for key in int_literals]
7129 if self.is_len_of_tuple(it): 7130 assert isinstance(it, CallExpr) 7131 tuples.append(it.args[0])
7151 r_size = self.literal_int_expr(right) 7152 assert r_size is not None 7153 if r_size > MAX_PRECISE_TUPLE_SIZE:
7155 continue 7156 assert isinstance(left, CallExpr) 7157 yes_type, no_type = self.narrow_with_len(
7196 else: 7197 assert False, "Unsupported type for len narrowing" 7198
7211 unpack = typ.items[unpack_index] 7212 assert isinstance(unpack, UnpackType) 7213 unpacked = get_proper_type(unpack.type)
7237 # we adjust the variadic item by "eating away" from it to satisfy the restriction. 7238 assert isinstance(unpacked, Instance) and unpacked.type.fullname == "builtins.tuple" 7239 min_len = typ.length() - 1
7403 call = find_member("__call__", supertype, subtype, is_operator=True)
7404 assert call is not None
7405 if not is_subtype(subtype, call, options=self.options):
7488 if isinstance(node, TypeAlias): 7489 assert isinstance(node.target, Instance) # type: ignore[misc] 7490 node = node.target.type
7490 node = node.target.type 7491 assert isinstance(node, TypeInfo), node 7492 any_type = AnyType(TypeOfAny.from_omitted_generics)
7509 node = sym.node 7510 assert isinstance(node, TypeInfo), node 7511 return node
7562 if b: 7563 assert isinstance(b.node, MypyFile) 7564 table = b.node.names
7576 sym = n.names.get(parts[i]) 7577 assert sym is not None, "Internal error: attempted lookup of unknown name" 7578 assert isinstance(sym.node, MypyFile)
7577 assert sym is not None, "Internal error: attempted lookup of unknown name" 7578 assert isinstance(sym.node, MypyFile) 7579 n = sym.node
7901 first_item = node.items[0] 7902 assert isinstance(first_item, Decorator) 7903 return first_item.var.is_settable_property
8000 # have effect on modules. 8001 assert result.extra_attrs is not None 8002 result.extra_attrs.mod_name = None
8238 else:
8239 assert typ is not None
8240 maps.append({expr: typ})
8328 unpacked = get_proper_type(unpacked.upper_bound) 8329 assert ( 8330 isinstance(unpacked, Instance) and unpacked.type.fullname == "builtins.tuple" 8331 ) 8332 normalized_items.append(unpacked.args[0])
8342 return map_instance_to_supertype(tp.fallback, base).args[0] 8343 assert False, "No Mapping base class found for TypedDict fallback" 8344 return None
8481 ret = visitor.node(defn) 8482 assert isinstance(ret, FuncItem) 8483 return ret
9031 return func.is_static
9032 assert False, f"Unexpected func type: {type(func)}"
9033
9063 first_item = defn.items[0] 9064 assert isinstance(first_item, Decorator) 9065 if not first_item.var.is_settable_property:
311 func = func or self.current_function() 312 assert func, "This method must be called from inside a function" 313 index = self.stack.index(func)
313 index = self.stack.index(func) 314 assert index, "CheckerScope stack must always start with a module" 315 enclosing = self.stack[index - 1]
404 if isinstance(node, PlaceholderNode):
405 assert False, f"PlaceholderNode {node.fullname!r} leaked to checker"
406 # Unknown reference; use any type implicitly to avoid
420 result = erasetype.erase_typevars(result) 421 assert result is not None 422 return result
545 typeddict_type = get_proper_type(typeddict_callable.ret_type) 546 assert isinstance(typeddict_type, TypedDictType) 547 return self.check_typeddict_call(
653 if fullname is None and member is not None: 654 assert object_type is not None 655 fullname = self.method_fullname(object_type, member)
690 """More precise type checking for str.format() calls on literals.""" 691 assert isinstance(e.callee, MemberExpr) 692 format_value = None
1000 """ 1001 assert info.special_alias is not None 1002 target = info.special_alias.target
1002 target = info.special_alias.target 1003 assert isinstance(target, ProperType) and isinstance(target, TypedDictType) 1004 return self.typeddict_callable_from_context(target, info.defn.type_vars)
1184 # Store inferred partial type. 1185 assert partial_type.type is not None 1186 typename = partial_type.type.fullname
1212 else: 1213 assert partial_type.type is not None 1214 typename = partial_type.type.fullname
1284 callback = self.plugin.get_function_hook(fullname) 1285 assert callback is not None # Assume that caller ensures this 1286 return callback(
1300 method_callback = self.plugin.get_method_hook(fullname) 1301 assert method_callback is not None # Assume that caller ensures this 1302 object_type = get_proper_type(object_type)
1340 else: 1341 assert isinstance(callee, Overloaded) 1342 items = []
1344 adjusted = self.apply_signature_hook(item, args, arg_kinds, arg_names, hook) 1345 assert isinstance(adjusted, CallableType) 1346 items.append(adjusted)
1435 """Check if this looks like an application of a generic function to overload argument.""" 1436 assert callee_type.variables 1437 if len(callee_type.arg_types) != 1 or len(args) != 1:
1500 if callable_name is None and member is not None: 1501 assert object_type is not None 1502 callable_name = self.method_fullname(object_type, member)
1881 if node.type is not None: 1882 assert isinstance(node.type, CallableType) 1883 return is_subtype(NoneType(), node.type.ret_type)
1997 res[i] = self.accept(args[i]) 1998 assert all(tp is not None for tp in res) 1999 return cast(list[Type], res)
2474 else: 2475 assert actual_names, "Internal error: named kinds without names given" 2476 act_name = actual_names[i]
2476 act_name = actual_names[i] 2477 assert act_name is not None 2478 act_type = actual_types[i]
2569 if isinstance(p_callee_type, TupleType): 2570 assert p_callee_type.items 2571 callee_arg_types = p_callee_type.items
2587 inner_unpack = unpacked_type.items[inner_unpack_index] 2588 assert isinstance(inner_unpack, UnpackType) 2589 inner_unpacked_type = get_proper_type(inner_unpack.type)
2599 # We assume heterogeneous tuples are desugared earlier. 2600 assert isinstance(inner_unpacked_type, Instance) 2601 assert inner_unpacked_type.type.fullname == "builtins.tuple"
2600 assert isinstance(inner_unpacked_type, Instance) 2601 assert inner_unpacked_type.type.fullname == "builtins.tuple" 2602 callee_arg_types = (
2612 else: 2613 assert isinstance(unpacked_type, Instance) 2614 assert unpacked_type.type.fullname == "builtins.tuple"
2613 assert isinstance(unpacked_type, Instance) 2614 assert unpacked_type.type.fullname == "builtins.tuple" 2615 callee_arg_types = [unpacked_type.args[0]] * len(actuals)
2620 2621 assert len(actual_types) == len(actuals) == len(actual_kinds) 2622
2629 2630 assert len(callee_arg_types) == len(actual_types) 2631 assert len(callee_arg_types) == len(callee_arg_kinds)
2630 assert len(callee_arg_types) == len(actual_types) 2631 assert len(callee_arg_types) == len(callee_arg_kinds) 2632 for actual, actual_type, actual_kind, callee_arg_type, callee_arg_kind in zip(
2930 for typ in plausible_targets: 2931 assert self.msg is self.chk.msg 2932 with self.msg.filter_errors() as w:
3115 first_union = get_proper_type(arg_types[idx]) 3116 assert isinstance(first_union, UnionType) 3117 res_items = []
3156 """Set _temporary_ type overrides for given expressions.""" 3157 assert len(exprs) == len(overrides) 3158 for expr, typ in zip(exprs, overrides):
3172 """ 3173 assert types, "Trying to merge no callables" 3174 if not all(isinstance(c, CallableType) for c in types):
3452 """Concatenate two fixed length tuples.""" 3453 assert not (find_unpack_in_list(left.items) and find_unpack_in_list(right.items)) 3454 return TupleType(
3736 3737 assert result is not None 3738 return result
4283
4284 assert e.op in ("and", "or") # Checked by visit_op_expr
4285
4322 # The boolean expression is statically known to be the left value 4323 assert left_map is not None 4324 return left_type
4326 # The boolean expression is statically known to be the right value 4327 assert right_map is not None 4328 return right_type
4501 unpack = left.items[unpack_index] 4502 assert isinstance(unpack, UnpackType) 4503 if isinstance(unpack.type, TypeVarTupleType):
4515 unpack = left.items[unpack_index] 4516 assert isinstance(unpack, UnpackType) 4517 unpacked = get_proper_type(unpack.type)
4522 bound = get_proper_type(unpacked.upper_bound) 4523 assert isinstance(bound, Instance) 4524 assert bound.type.fullname == "builtins.tuple"
4523 assert isinstance(bound, Instance) 4524 assert bound.type.fullname == "builtins.tuple" 4525 middle = bound.args[0]
4526 else: 4527 assert isinstance(unpacked, Instance) 4528 assert unpacked.type.fullname == "builtins.tuple"
4527 assert isinstance(unpacked, Instance) 4528 assert unpacked.type.fullname == "builtins.tuple" 4529 middle = unpacked.args[0]
4753 if expr.kind == REVEAL_TYPE: 4754 assert expr.expr is not None 4755 revealed_type = self.accept(
4966 # TODO: in future we may want to support type application to variadic functions. 4967 assert t.is_type_obj() 4968 info = t.type_object()
4982 tvt = vars[prefix] 4983 assert isinstance(tvt, TypeVarTupleType) 4984 start, middle, end = split_with_prefix_and_suffix(tuple(args), prefix, suffix)
5198 elif type_context and is_named_instance(type_context, TUPLE_LIKE_INSTANCE_NAMES): 5199 assert isinstance(type_context, Instance) 5200 if type_context.args:
5228 # Note: this logic depends on full structure match in tuple_context_matches(). 5229 assert type_context_items 5230 ctx_item = type_context_items[j]
5230 ctx_item = type_context_items[j] 5231 assert isinstance(ctx_item, UnpackType) 5232 ctx = ctx_item.type
5552 erased_ctx = replace_meta_vars(ctx, ErasedType()) 5553 assert isinstance(erased_ctx, ProperType) and isinstance(erased_ctx, CallableType) 5554 callable_ctx = erased_ctx
5670 5671 assert False, "unreachable" 5672
5695 method = self.chk.scope.current_function() 5696 assert method is not None 5697 if method.arguments:
6086 self.type_context.pop() 6087 assert typ is not None 6088 self.chk.store_type(node, typ)
6713 continue 6714 assert isinstance(tv, TypeVarType) 6715 unique_typevars[name] = tv
114 if rvalue is not None: 115 assert is_lvalue 116 self.rvalue = rvalue
349 if method.is_property: 350 assert isinstance(method, OverloadedFuncDef) 351 getter = method.items[0]
351 getter = method.items[0] 352 assert isinstance(getter, Decorator) 353 if mx.is_lvalue and getter.var.is_settable_property:
367 return AnyType(TypeOfAny.special_form) 368 assert isinstance(method.type, Overloaded) 369 signature = method.type
397 elif not node.is_property and node.items: 398 assert isinstance(node.items[0], Decorator) 399 unsafe_super = node.items[0].func.is_trivial_body
407 ret_type = typ.items[0].ret_type 408 assert isinstance(ret_type, ProperType) 409 if isinstance(ret_type, TupleType):
436 else:
437 assert False, f"Unexpected type {ret_type!r}"
438
543 if isinstance(v, FuncDef): 544 assert False, "Did not expect a function" 545 if isinstance(v, MypyFile):
968 typ = get_proper_type(expand_self_type(var, functype, mx.self_type)) 969 assert isinstance(typ, FunctionLike) 970 if is_trivial_self:
986 expanded = expanded.items[0] 987 assert isinstance(expanded, CallableType), expanded 988 if var.is_settable_property and mx.is_lvalue and var.setter_type is not None:
994 expanded = get_proper_type(inferred_expanded) 995 assert isinstance(expanded, CallableType) 996 if not expanded.arg_types:
1208 symnode = node.node 1209 assert isinstance(symnode, Var) 1210 return apply_class_attr_hook(
1232 if isinstance(node.node, Var): 1233 assert isuper is not None 1234 object_type = get_proper_type(mx.self_type)
1312 if is_decorated: 1313 assert isinstance(node.node, Decorator) 1314 if node.node.type:
1319 else:
1320 assert isinstance(node.node, SYMBOL_FUNCBASE_TYPES)
1321 typ = function_type(node.node, mx.named_type("builtins.function"))
1505 return cast(F, Overloaded(items)) 1506 assert isinstance(method, CallableType) 1507 if not method.arg_types:
220 else: 221 assert False 222
239 elif len(star_positions) >= 2: 240 assert False, "Parser should prevent multiple starred patterns" 241 required_patterns = len(o.patterns)
388 unpack = types[unpack_index] 389 assert isinstance(unpack, UnpackType) 390 unpacked = get_proper_type(unpack.type)
391 # This should be guaranteed by the normalization in the caller. 392 assert isinstance(unpacked, Instance) and unpacked.type.fullname == "builtins.tuple" 393 if star_pos is None:
774 proper_type = tuple_fallback(proper_type) 775 assert isinstance(proper_type, Instance) 776 empty_type = fill_typevars(proper_type.type)
801 """ 802 assert isinstance(expr, NameExpr), expr 803 node = expr.node
803 node = expr.node 804 assert isinstance(node, Var), node 805 return node
340 """ 341 assert all(s.key for s in specs), "Keys must be auto-generated first!" 342 replacements = self.find_replacements_in_call(call, [cast(str, s.key) for s in specs])
342 replacements = self.find_replacements_in_call(call, [cast(str, s.key) for s in specs]) 343 assert len(replacements) == len(specs) 344 for spec, repl in zip(specs, replacements):
346 actual_type = repl.type if isinstance(repl, TempNode) else self.chk.lookup_type(repl) 347 assert actual_type is not None 348
371 else: 372 assert isinstance(call.callee, MemberExpr) 373 if isinstance(call.callee.expr, StrExpr):
577 """ 578 assert spec.key, "Keys must be auto-generated first!" 579 if spec.field == spec.key:
580 return repl 581 assert spec.field 582
596 # These asserts are guaranteed by the original regexp. 597 assert isinstance(temp_ast, MypyFile) 598 temp_ast = temp_ast.defs[0]
598 temp_ast = temp_ast.defs[0] 599 assert isinstance(temp_ast, ExpressionStmt) 600 temp_ast = temp_ast.expr
642 if not isinstance(temp_ast.index, (NameExpr, IntExpr)): 643 assert spec.key, "Call this method only after auto-generating keys!" 644 assert spec.field
643 assert spec.key, "Call this method only after auto-generating keys!" 644 assert spec.field 645 self.msg.fail(
656 # Replace it with the actual replacement expression. 657 assert isinstance(temp_ast, (IndexExpr, MemberExpr)) # XXX: this is redundant 658 if isinstance(temp_ast, IndexExpr):
689 else: 690 assert False 691
730 unpacked = rep_types[unpack_index] 731 assert isinstance(unpacked, UnpackType) 732 unpacked = get_proper_type(unpacked.type)
734 unpacked = get_proper_type(unpacked.upper_bound) 735 assert ( 736 isinstance(unpacked, Instance) and unpacked.type.fullname == "builtins.tuple" 737 ) 738 unpack_items = [unpacked.args[0]] * extras
808 continue 809 assert specifier.key is not None 810 if specifier.key not in mapping:
813 rep_type = mapping[specifier.key] 814 assert specifier.conv_type is not None 815 expected_type = self.conversion_type(specifier.conv_type, replacements, expr)
855 else: 856 assert False, "Unreachable" 857
934 def check_type(typ: Type) -> bool: 935 assert expected_type is not None 936 ret = self.check_placeholder_type(typ, expected_type, context)
980 def check_type(type: Type) -> bool: 981 assert expected_type is not None 982 if isinstance(format_expr, BytesExpr):
144 ret = left**right 145 assert isinstance(ret, int) 146 return ret
150 def constant_fold_binary_float_op(op: str, left: int | float, right: int | float) -> float | None: 151 assert not (isinstance(left, int) and isinstance(right, int)), (op, left, right) 152 if op == "+":
173 else: 174 assert isinstance(ret, float), ret 175 return ret
141 unpack_type = callee.arg_types[i] 142 assert isinstance(unpack_type, UnpackType) 143
154 else: 155 assert False, "mypy bug: unhandled constraint inference case" 156
192 inner_unpack = unpacked_type.items[0] 193 assert isinstance(inner_unpack, UnpackType) 194 inner_unpacked_type = get_proper_type(inner_unpack.type)
208 # ...or it can be a homogeneous tuple. 209 assert ( 210 isinstance(inner_unpacked_type, Instance) 211 and inner_unpacked_type.type.fullname == "builtins.tuple" 212 ) 213 for at in actual_types[:-suffix_len]:
221 else: 222 assert False, "mypy bug: unhandled constraint inference case" 223 else:
723 # We can't do anything useful with a partial type here. 724 assert False, "Internal error" 725
728 def visit_type_var(self, template: TypeVarType) -> list[Constraint]: 729 assert False, ( 730 "Unexpected TypeVarType in ConstraintBuilderVisitor" 731 " (should have been handled in infer_constraints)" 732 ) 733
772 ) 773 assert call is not None 774 if (
825 erased = erase_typevars(template) 826 assert isinstance(erased, Instance) # type: ignore[misc] 827 # We always try nominal inference if possible,
835 # the correct corresponding type variable. 836 assert instance.type.type_var_tuple_prefix is not None 837 assert instance.type.type_var_tuple_suffix is not None
836 assert instance.type.type_var_tuple_prefix is not None 837 assert instance.type.type_var_tuple_suffix is not None 838 prefix_len = instance.type.type_var_tuple_prefix
840 tvt = instance.type.defn.type_vars[prefix_len] 841 assert isinstance(tvt, TypeVarTupleType) 842 fallback = tvt.tuple_fallback
900 # the correct corresponding type variable. 901 assert template.type.type_var_tuple_prefix is not None 902 assert template.type.type_var_tuple_suffix is not None
901 assert template.type.type_var_tuple_prefix is not None 902 assert template.type.type_var_tuple_suffix is not None 903 prefix_len = template.type.type_var_tuple_prefix
905 tvt = template.type.defn.type_vars[prefix_len] 906 assert isinstance(tvt, TypeVarTupleType) 907 fallback = tvt.tuple_fallback
1015 continue 1016 assert ( 1017 isinstance(unpacked, Instance) 1018 and unpacked.type.fullname == "builtins.tuple" 1019 ) 1020 item = unpacked.args[0]
1139 unpack = template.arg_types[unpack_present] 1140 assert isinstance(unpack, UnpackType) 1141 tuple_type = get_tuple_fallback_from_unpack(unpack)
1263 unpack_type = template.items[unpack_index] 1264 assert isinstance(unpack_type, UnpackType) 1265 unpacked_type = get_proper_type(unpack_type.type)
1270 else: 1271 assert ( 1272 isinstance(unpacked_type, Instance) 1273 and unpacked_type.type.fullname == "builtins.tuple" 1274 ) 1275 res = infer_constraints(unpacked_type, actual, self.direction)
1275 res = infer_constraints(unpacked_type, actual, self.direction) 1276 assert isinstance(actual, Instance) # ensured by is_varlength_tuple == True 1277 for i, ti in enumerate(template.items):
1284 else: 1285 assert isinstance(actual, TupleType) 1286 unpack_constraints = build_constraints_for_simple_unpack(
1298 a_unpack = actual.items[a_unpack_index] 1299 assert isinstance(a_unpack, UnpackType) 1300 a_unpacked = get_proper_type(a_unpack.type)
1311 if isinstance(a_unpacked, Instance): 1312 assert a_unpacked.type.fullname == "builtins.tuple" 1313 for tm in t_middle:
1326 # Cases above will return if actual wasn't a TupleType. 1327 assert isinstance(actual, TupleType) 1328 if len(actual_items) == len(template_items):
1366 def visit_union_type(self, template: UnionType) -> list[Constraint]: 1367 assert False, ( 1368 "Unexpected UnionType in ConstraintBuilderVisitor" 1369 " (should have been handled in infer_constraints)" 1370 ) 1371
1372 def visit_type_alias_type(self, template: TypeAliasType) -> list[Constraint]:
1373 assert False, f"This should be never called, got {template}"
1374
1384 unpacked = get_proper_type(t.type) 1385 assert isinstance(unpacked, Instance) 1386 res.extend(infer_constraints(unpacked, any_type, self.direction))
1481 return base 1482 assert False, "Invalid unpack type" 1483
1504 if isinstance(tp, TupleType): 1505 assert isinstance(tp.items[0], UnpackType) 1506 star_type = tp.items[0]
1530 template_unpack = find_unpack_in_list(template_args) 1531 assert template_unpack is not None 1532 template_prefix = template_unpack
1542 # These can't be subtypes of each-other, return fast. 1543 assert isinstance(t_unpack, UnpackType) 1544 if isinstance(t_unpack.type, TypeVarTupleType):
1576 # Add constraint(s) for variadic item when possible. 1577 assert isinstance(t_unpack, UnpackType) 1578 tp = get_proper_type(t_unpack.type)
1595 actual_unpack_type = actual_args[actual_unpack] 1596 assert isinstance(actual_unpack_type, UnpackType) 1597 a_unpacked = get_proper_type(actual_unpack_type.type)
1599 t_unpack = template_args[template_unpack] 1600 assert isinstance(t_unpack, UnpackType) 1601 tp = get_proper_type(t_unpack.type)
1661 right_by_position = right.try_synthesizing_arg_from_vararg(None) 1662 assert right_by_position is not None 1663 i = right_star.pos
1663 i = right_star.pos 1664 assert i is not None 1665 while i < len(left.arg_kinds) and left.arg_kinds[i].is_positional():
1666 left_by_position = left.argument_by_position(i) 1667 assert left_by_position is not None 1668 res.extend(
1684 right_by_name = right.try_synthesizing_arg_from_kwarg(None) 1685 assert right_by_name is not None 1686 for name in left_only_names:
1687 left_by_name = left.argument_by_name(name) 1688 assert left_by_name is not None 1689 res.extend(
127 def visit_type_alias_type(self, t: TypeAliasType) -> ProperType: 128 assert False, "only ProperTypes supported" 129
12 import os 13 import pickle 14 import sys
564 # ( see https://github.com/python/cpython/issues/74261 )
565 os.system("")
566 if "error" in response:
564 # ( see https://github.com/python/cpython/issues/74261 )
565 os.system("")
566 if "error" in response:
632 633 options_dict = pickle.loads(base64.b64decode(args.options_data)) 634 options_obj = Options()
7 import ctypes 8 import subprocess 9 from ctypes.wintypes import DWORD, HANDLE
39 if sys.platform == "win32":
40 subprocess.check_output(f"taskkill /pid {pid} /f /t")
41 else:
13 import os 14 import pickle 15 import subprocess
14 import pickle 15 import subprocess 16 import sys
43 if sys.platform == "win32": 44 from subprocess import STARTUPINFO 45
69 try: 70 subprocess.Popen(command, creationflags=0x10, startupinfo=info) # CREATE_NEW_CONSOLE 71 return 0
286 ret = method(self, **data) 287 assert isinstance(ret, dict) 288 return ret
406 else: 407 assert remove is None and update is None 408 messages = self.fine_grained_increment_follow_imports(
495 continue 496 assert state.path is not None 497 self.fswatcher.set_file_data(
511 if not state.is_fresh(): 512 assert state.path is not None 513 changed.append((state.id, state.path))
565 """ 566 assert self.fine_grained_manager is not None 567 manager = self.fine_grained_manager.manager
608 609 assert self.fine_grained_manager is not None 610 fine_grained_manager = self.fine_grained_manager
700 module_path = graph[module_id].path 701 assert module_path is not None 702 to_delete.append((module_id, module_path))
759 if nxt.path in changed_paths: 760 assert nxt.path is not None # TODO 761 changed.append((nxt.module, nxt.path))
898 path = source.path 899 assert path 900 removed.append((source.module, path))
906 for s in sources: 907 assert s.path 908 if s.path in last and last[s.path] != s.module:
970 else: 971 assert False, "Unknown inspection kind" 972 finally:
974 if "out" in result: 975 assert isinstance(result["out"], str) 976 result["out"] += "\n"
182 result = super().visit_instance(t) 183 assert isinstance(result, ProperType) and isinstance(result, Instance) 184 if t.type.fullname == "builtins.tuple":
189 if isinstance(unpacked, Instance): 190 assert unpacked.type.fullname == "builtins.tuple" 191 return unpacked
195 result = super().visit_tuple_type(t) 196 assert isinstance(result, ProperType) and isinstance(result, TupleType) 197 if len(result.items) == 1:
202 if isinstance(unpacked, Instance): 203 assert unpacked.type.fullname == "builtins.tuple" 204 if result.partial_fallback.type.fullname != "builtins.tuple":
212 result = super().visit_callable_type(t) 213 assert isinstance(result, ProperType) and isinstance(result, CallableType) 214 # Usually this is done in semanal_typeargs.py, but erasure can create
32 if sub_code_of is not None: 33 assert sub_code_of.sub_code_of is None, "Nested subcategories are not supported" 34 sub_code_map[sub_code_of.code].add(code)
151 if parent_error is not None: 152 assert severity == "note", "Only notes can specify parent errors" 153 self.parent_error = parent_error
195 last = self.errors._watchers.pop() 196 assert last == self 197 return False
227 def filtered_errors(self) -> list[ErrorInfo]: 228 assert self._filtered is not None 229 return self._filtered
575 def _add_error_info(self, file: str, info: ErrorInfo) -> None: 576 assert file not in self.flushed_files 577 # process the stack of ErrorWatchers before modifying any internal state
1384
1385 assert severity in ("error", "note")
1386 if severity == "note":
92 if instance.type.has_type_var_tuple_type: 93 assert instance.type.type_var_tuple_prefix is not None 94 assert instance.type.type_var_tuple_suffix is not None
93 assert instance.type.type_var_tuple_prefix is not None 94 assert instance.type.type_var_tuple_suffix is not None 95
102 tvar = tvars_middle[0]
103 assert isinstance(tvar, TypeVarTupleType)
104 variables = {tvar.id: TupleType(list(args_middle), tvar.tuple_fallback)}
111 for binder, arg in zip(tvars, instance_args): 112 assert isinstance(binder, TypeVarLikeType) 113 variables[binder.id] = arg
134 else: 135 assert isinstance(callee, Overloaded) 136 fresh_overload = Overloaded([freshen_function_type_vars(item) for item in callee.items])
161 result = t.accept(FreshenCallableVisitor()) 162 assert isinstance(result, type(t)) 163 return result
168 result = super().visit_callable_type(t) 169 assert isinstance(result, ProperType) and isinstance(result, CallableType) 170 return freshen_function_type_vars(result)
233 # may be called during semantic analysis before all invalid types are removed. 234 assert unpacked.type.fullname == "builtins.tuple" 235 args = list(unpacked.args)
272 elif isinstance(repl, Parameters): 273 assert t.flavor == ParamSpecFlavor.BARE 274 return Parameters(
313 def expand_unpack(self, t: UnpackType) -> list[Type]: 314 assert isinstance(t.type, TypeVarTupleType) 315 repl = get_proper_type(self.variables.get(t.type.id, t.type))
345 expanded_tuple = var_arg_type.accept(self) 346 assert isinstance(expanded_tuple, ProperType) and isinstance(expanded_tuple, TupleType) 347 expanded_items = expanded_tuple.items
433 new_item = item.accept(self) 434 assert isinstance(new_item, ProperType) 435 assert isinstance(new_item, CallableType)
434 assert isinstance(new_item, ProperType) 435 assert isinstance(new_item, CallableType) 436 items.append(new_item)
467 if isinstance(unpacked, Instance): 468 assert unpacked.type.fullname == "builtins.tuple" 469 if t.partial_fallback.type.fullname != "builtins.tuple":
474 fallback = t.partial_fallback.accept(self) 475 assert isinstance(fallback, ProperType) and isinstance(fallback, Instance) 476 return t.copy_modified(items=items, fallback=fallback)
481 fallback = t.fallback.accept(self) 482 assert isinstance(fallback, ProperType) and isinstance(fallback, Instance) 483 result = t.copy_modified(item_types=self.expand_types(t.items.values()), fallback=fallback)
231 else: 232 assert options.python_version[0] >= 3 233 feature_version = options.python_version[1]
285 286 assert isinstance(tree, MypyFile) 287 return tree
338 ignored = None 339 assert isinstance(typ, ast3.Expression) 340 converted = TypeConverter(
625 ) -> Block: 626 assert stmts # must be non-empty 627 b = Block(
724 popped = ret.pop() 725 assert isinstance(popped, IfStmt) 726 skipped_if_stmts.append(popped)
952 func_type_ast = ast3_parse(n.type_comment, "<func_type>", "func_type") 953 assert isinstance(func_type_ast, FunctionType) 954 # for ellipsis arg
1307 typ = TypeConverter(self.errors, line=line).visit(n.annotation) 1308 assert typ is not None 1309 typ.column = n.annotation.col_offset
1445 def visit_ImportFrom(self, n: ast3.ImportFrom) -> ImportBase: 1446 assert n.level is not None 1447 if len(n.names) == 1 and n.names[0].name == "*":
1498 # mypy translates (1 and 2 and 3) as (1 and (2 and 3)) 1499 assert len(n.values) >= 2 1500 op_node = n.op
1725 def visit_Interpolation(self, n: ast_Interpolation) -> Expression: 1726 assert False, "Unreachable" 1727
1800 stars = [p for p in patterns if isinstance(p, StarredPattern)] 1801 assert len(stars) < 2 1802
1828 class_ref = self.visit(n.cls) 1829 assert isinstance(class_ref, RefExpr) 1830 positionals = [self.visit(p) for p in n.patterns]
1948 typ = visitor(node) 1949 assert isinstance(typ, ProperType) 1950 return typ
1991 converted = self.visit(arg) 1992 assert converted is not None 1993 typ = converted
2015 converted = self.visit(value) 2016 assert converted is not None 2017 typ = converted
2116 value = self.visit(cast(Any, n).value) 2117 assert isinstance(value, Type) 2118 return value
2187 def visit_List(self, n: ast3.List) -> Type: 2188 assert isinstance(n.ctx, ast3.Load) 2189 result = self.translate_argument_list(n.elts)
106 def is_explicit_package_base(self, path: str) -> bool: 107 assert self.explicit_package_bases 108 return normalise_package_base(path) in self.explicit_package_bases
149 else: 150 assert stnode.node is not None, (table_fullname + "." + key, cross_ref) 151 value.node = stnode.node
152 elif not self.allow_missing:
153 assert False, f"Could not find cross-ref {cross_ref}"
154 else:
163 else:
164 assert False, f"Unexpected empty node {key!r}: {value}"
165
387 # Looks like a missing TypeInfo during an initial daemon load, put something there
388 assert (
389 allow_missing
390 ), "Should never get here in normal mode, got {}:{} instead of TypeInfo".format(
391 type(node).__name__, node.fullname if node else ""
392 )
393 return missing_info(modules)
411 else: 412 assert allow_missing 413 return missing_alias()
417 # Looks like a missing TypeAlias during an initial daemon load, put something there
418 assert (
419 allow_missing
420 ), "Should never get here in normal mode, got {}:{} instead of TypeAlias".format(
421 type(node).__name__, node.fullname if node else ""
422 )
423 return missing_alias()
146 dirname, basename = os.path.split(path) 147 assert basename == "__init__.py", path 148 assert not os.path.exists(path), path # Not cached!
147 assert basename == "__init__.py", path 148 assert not os.path.exists(path), path # Not cached! 149 dirname = os.path.normpath(dirname)
22 if phase == "start": 23 assert self.gc_start_time is None, "Start phase out of sequence" 24 self.gc_start_time = time.time()
25 elif phase == "stop": 26 assert self.gc_start_time is not None, "Stop phase out of sequence" 27 self.gc_calls += 1
32 else:
33 assert False, f"Unrecognized gc phase ({phase!r})"
34
6 import os 7 import subprocess 8
17 try: 18 subprocess.check_output(["git", "--help"]) 19 return True
17 try: 18 subprocess.check_output(["git", "--help"]) 19 return True
27 """Get the SHA-1 of the HEAD of a git repository.""" 28 return subprocess.check_output(["git", "rev-parse", "HEAD"], cwd=dir).strip() 29
27 """Get the SHA-1 of the HEAD of a git repository.""" 28 return subprocess.check_output(["git", "rev-parse", "HEAD"], cwd=dir).strip() 29
32 """Check whether a git repository has uncommitted changes.""" 33 output = subprocess.check_output(["git", "status", "-uno", "--porcelain"], cwd=dir) 34 return output.strip() != b""
32 """Check whether a git repository has uncommitted changes.""" 33 output = subprocess.check_output(["git", "status", "-uno", "--porcelain"], cwd=dir) 34 return output.strip() != b""
116 data = {item: (dep - ready) for item, dep in data.items() if item not in ready}
117 assert not data, f"A cyclic dependency exists amongst {data!r}"
94 if method.is_property: 95 assert isinstance(method, OverloadedFuncDef) 96 dec = method.items[0]
96 dec = method.items[0] 97 assert isinstance(dec, Decorator) 98 return dec.var
223 self.fg_manager.flush_cache() 224 assert state.path is not None 225 self.fg_manager.update([(state.id, state.path)], [])
245 builtins = self.fg_manager.graph["builtins"].tree 246 assert builtins is not None 247 object_node = builtins.names["object"].node
247 object_node = builtins.names["object"].node 248 assert isinstance(object_node, TypeInfo) 249 return Instance(object_node, [])
267 # First gather all attributes for every union variant. 268 assert instances 269 all_attrs = []
449 modules, reload_needed = self.modules_for_nodes(nodes, expression) 450 assert not reload_needed 451
575 if state is None: 576 assert err_dict 577 return err_dict
581 self.reload_module(state) 582 assert state.tree is not None 583
589 ) 590 assert len(pos) == 2 591 # Inexact location, return all expressions.
144 bytes_written, err = ov.GetOverlappedResult(True) 145 assert err == 0, err 146 assert bytes_written == len(encoded_data)
145 assert err == 0, err 146 assert bytes_written == len(encoded_data) 147 except OSError as e:
263 res = _winapi.WaitForSingleObject(ov.event, timeout) 264 assert res == _winapi.WAIT_OBJECT_0 265 except BaseException:
269 _, err = ov.GetOverlappedResult(True) 270 assert err == 0 271 else:
311 name = self.sock.getsockname() 312 assert isinstance(name, str) 313 return name
79 # instances. All the heavy lifting is done in the join of tuple types. 80 assert s.type.type_var_tuple_prefix is not None 81 assert s.type.type_var_tuple_suffix is not None
80 assert s.type.type_var_tuple_prefix is not None 81 assert s.type.type_var_tuple_suffix is not None 82 prefix = s.type.type_var_tuple_prefix
84 tvt = s.type.defn.type_vars[prefix] 85 assert isinstance(tvt, TypeVarTupleType) 86 fallback = tvt.tuple_fallback
130 if isinstance(new_type, Instance): 131 assert new_type.type.fullname == "builtins.tuple" 132 new_type = UnpackType(new_type)
133 else: 134 assert isinstance(new_type, TupleType) 135 args.extend(new_type.items)
141 new_type = join_types(ta, sa, self) 142 assert new_type is not None 143 args.append(new_type)
187 best = res 188 assert best is not None 189 for promote in t.type._promote:
473 s_unpack = s.items[s_unpack_index] 474 assert isinstance(s_unpack, UnpackType) 475 s_unpacked = get_proper_type(s_unpack.type)
476 t_unpack = t.items[t_unpack_index] 477 assert isinstance(t_unpack, UnpackType) 478 t_unpacked = get_proper_type(t_unpack.type)
494 if isinstance(t_unpacked, Instance): 495 assert t_unpacked.type.fullname == "builtins.tuple" 496 tuple_instance = t_unpacked
497 else: 498 assert isinstance(t_unpacked, TypeVarTupleType) 499 tuple_instance = t_unpacked.tuple_fallback
516 return None 517 assert s_unpacked.type.fullname == "builtins.tuple" 518 assert t_unpacked.type.fullname == "builtins.tuple"
517 assert s_unpacked.type.fullname == "builtins.tuple" 518 assert t_unpacked.type.fullname == "builtins.tuple" 519 mid_joined = join_types(s_unpacked.args[0], t_unpacked.args[0])
531 else: 532 assert t_unpack_index is not None 533 variadic = t
538 unpack = variadic.items[unpack_index] 539 assert isinstance(unpack, UnpackType) 540 unpacked = get_proper_type(unpack.type)
579 ) 580 assert isinstance(fallback, Instance) 581 items = self.join_tuples(self.s, t)
635 # never get here. 636 assert False, "Internal error" 637
646 def visit_type_alias_type(self, t: TypeAliasType) -> ProperType:
647 assert False, f"This should be never called, got {t}"
648
788 else: 789 assert isinstance(unpacked, Instance) and unpacked.type.fullname == "builtins.tuple" 790 fallback_type = unpacked.type
893 def unpack_callback_protocol(t: Instance) -> ProperType | None: 894 assert t.type.is_protocol 895 if t.type.protocol_members == ["__call__"]:
32 if raise_on_missing:
33 assert "." in head, f"Cannot find module for {name}"
34 return None
44 if raise_on_missing:
45 assert rest, f"Cannot find {name}, got a module symbol"
46 return None
52 if raise_on_missing:
53 assert key in names, f"Cannot find component {key!r} for {name!r}"
54 return None
62 if raise_on_missing:
63 assert node, f"Cannot find {name}"
64 return None
6 import os 7 import subprocess 8 import sys
305 sys_exe = ( 306 subprocess.check_output( 307 python_executable_prefix(str_ver) + ["-c", "import sys; print(sys.executable)"], 308 stderr=subprocess.STDOUT, 309 ) 310 .decode()
520 if strict_flag: 521 assert dest is not None 522 strict_flag_names.append(flag)
1574 parser.error("--package-root does not work here (no fscache)")
1575 assert fscache is not None # Since mypy doesn't know parser.error() raises.
1576 # Do some stuff with drive letters to make Windows happy (esp. tests).
1695 print("Installing missing stub packages:")
1696 assert options.python_executable, "Python executable required to install types"
1697 cmd = [options.python_executable, "-m", "pip", "install"] + packages
1705 print() 1706 subprocess.run(cmd) 1707 return True
23 alias = instance.type.special_alias 24 assert alias is not None 25 if not alias._is_recursive:
96 t = expand_type_by_instance(b, instance) 97 assert isinstance(t, Instance) 98 result.append(t)
161 # avoid full expansion and make it faster. 162 assert isinstance(narrowed, UnionType) 163 return make_simplified_union(
353 if isinstance(left, PartialType) or isinstance(right, PartialType): 354 assert False, "Unexpectedly encountered partial type" 355
550 if isinstance(get_proper_type(call), FunctionLike): 551 assert call is not None and other is not None 552 return _is_overlapping_types(call, other)
594 # Similar to subtyping, we delegate the heavy lifting to the tuple overlap. 595 assert right.type.type_var_tuple_prefix is not None 596 assert right.type.type_var_tuple_suffix is not None
595 assert right.type.type_var_tuple_prefix is not None 596 assert right.type.type_var_tuple_suffix is not None 597 prefix = right.type.type_var_tuple_prefix
599 tvt = right.type.defn.type_vars[prefix] 600 assert isinstance(tvt, TypeVarTupleType) 601 fallback = tvt.tuple_fallback
641
642 assert type(left) != type(right), f"{type(left)} vs {type(right)}"
643 return False
688 right = adjust_tuple(right, left) or right
689 assert isinstance(left, TupleType), f"Type {left} is not a tuple"
690 assert isinstance(right, TupleType), f"Type {right} is not a tuple"
689 assert isinstance(left, TupleType), f"Type {left} is not a tuple"
690 assert isinstance(right, TupleType), f"Type {right} is not a tuple"
691
731 # Nested non-variadic tuples should be normalized at this point. 732 assert isinstance(unpacked, Instance) 733 instance = unpacked
733 instance = unpacked 734 assert instance.type.fullname == "builtins.tuple" 735 new_items.extend([instance.args[0]] * extra)
858 s = self.s 859 assert s.type.type_var_tuple_prefix is not None 860 assert s.type.type_var_tuple_suffix is not None
859 assert s.type.type_var_tuple_prefix is not None 860 assert s.type.type_var_tuple_suffix is not None 861 prefix = s.type.type_var_tuple_prefix
863 tvt = s.type.defn.type_vars[prefix] 864 assert isinstance(tvt, TypeVarTupleType) 865 fallback = tvt.tuple_fallback
885 else: 886 assert isinstance(meet, UninhabitedType) 887 meet = UnpackType(tv.tuple_fallback.copy_modified(args=[meet]))
1004 s_unpack = s.items[unpack_index] 1005 assert isinstance(s_unpack, UnpackType) 1006 s_unpacked = get_proper_type(s_unpack.type)
1007 t_unpack = t.items[unpack_index] 1008 assert isinstance(t_unpack, UnpackType) 1009 t_unpacked = get_proper_type(t_unpack.type)
1027 else: 1028 assert t_unpack_index is not None 1029 variadic = t
1033 unpack = variadic.items[unpack_index] 1034 assert isinstance(unpack, UnpackType) 1035 unpacked = get_proper_type(unpack.type)
1087 # at least one of s_item_type and t_item_type is not None 1088 assert t_item_type is not None 1089 item_list.append((item_name, t_item_type))
1109 # We can't determine the meet of partial types. We should never get here. 1110 assert False, "Internal error" 1111
1125 def visit_type_alias_type(self, t: TypeAliasType) -> ProperType:
1126 assert False, f"This should be never called, got {t}"
1127
1182 left, right = get_proper_types((left, right)) 1183 assert not isinstance(left, TypedDictType) or not isinstance(right, TypedDictType) 1184
1228 left, right = get_proper_types((left, right)) 1229 assert not isinstance(left, TypedDictType) or not isinstance(right, TypedDictType) 1230
1231 if isinstance(left, TypedDictType): 1232 assert isinstance(right, Instance) 1233 typed, other = left, right
1234 else: 1235 assert isinstance(left, Instance) 1236 assert isinstance(right, TypedDictType)
1235 assert isinstance(left, Instance) 1236 assert isinstance(right, TypedDictType) 1237 typed, other = right, left
275 if secondary_context is not None: 276 assert origin_span is not None 277 origin_span = itertools.chain(origin_span, span_from_context(secondary_context))
804 arg_name = callee.arg_names[m - 1] 805 assert arg_name is not None 806 arg_type_str, expected_type_str = format_type_distinctly(
1043 if module: 1044 assert callee.definition is not None 1045 fname = callable_name(callee)
2260 else: 2261 assert isinstance(exp, Overloaded) 2262 self.pretty_overload(
2277 else: 2278 assert isinstance(got, Overloaded) 2279 self.pretty_overload(
2841 else: 2842 assert isinstance(t, TypeVarLikeType) 2843 d.setdefault(t.name, set()).add(scoped_type_var_name(t))
3071 """ 3072 assert right.type.is_protocol 3073 missing: list[str] = []
3087 """ 3088 assert right.type.is_protocol 3089 conflicts: list[tuple[str, Type, Type, bool]] = []
3093 supertype = find_member(member, right, left) 3094 assert supertype is not None 3095 subtype = mypy.typeops.get_protocol_member(left, member, class_obj)
3117 continue 3118 assert supertype is not None and subtype is not None 3119 is_compat = is_subtype(supertype, subtype, options=options)
3130 """ 3131 assert right.type.is_protocol 3132 all_flags: list[tuple[str, set[int], set[int]]] = []
3188 def format_string_list(lst: list[str]) -> str: 3189 assert lst 3190 if len(lst) == 1:
3248 pass 3249 assert False, "Couldn't determine module from CallableType" 3250 return None
90 def read(self, name: str) -> bytes: 91 assert os.path.normpath(name) != os.path.abspath(name), "Don't use absolute paths!" 92
99 def write(self, name: str, data: bytes, mtime: float | None = None) -> bool: 100 assert os.path.normpath(name) != os.path.abspath(name), "Don't use absolute paths!" 101
172
173 cur = self.db.execute(f"SELECT {field} FROM files2 WHERE path = ?", (name,))
174 results = cur.fetchall()
176 raise FileNotFoundError() 177 assert len(results) == 1 178 return results[0][0]
181 mtime = self._query(name, "mtime") 182 assert isinstance(mtime, float) 183 return mtime
186 data = self._query(name, "data") 187 assert isinstance(data, bytes) 188 return data
66 super().visit_namedtuple_expr(o) 67 assert o.info.tuple_type 68 o.info.tuple_type.accept(self)
12 import re 13 import subprocess 14 import sys
114 else: 115 assert False 116 return msg, notes
857 sys_path, site_packages = ast.literal_eval( 858 subprocess.check_output( 859 [python_executable, pyinfo.__file__, "getsearchdirs"], 860 env=env, 861 stderr=subprocess.PIPE, 862 ).decode() 863 )
868 except OSError as err: 869 assert err.errno is not None 870 reason = os.strerror(err.errno)
942 for site in site_packages: 943 assert site not in lib_path 944 if (
977 versions_path = os_path_join(stdlib_dir, "VERSIONS") 978 assert os.path.isfile(versions_path), (custom_typeshed_dir, versions_path, __file__) 979 with open(versions_path) as f:
15 mro = linearize_hierarchy(info, obj_type)
16 assert mro, f"Could not produce a MRO at all for {info}"
17 info.mro = mro
39 for base in bases:
40 assert base is not None, f"Cannot linearize bases for {info.fullname} {bases}"
41 lin_bases.append(linearize_hierarchy(base, obj_type))
198 a = self.accept(mypy.strconv.StrConv(options=options)) 199 assert a 200 return a
410 def deserialize(cls, data: JsonDict) -> MypyFile: 411 assert data[".class"] == "MypyFile", data 412 tree = MypyFile([], [])
432 def read(cls, data: Buffer) -> MypyFile: 433 assert read_tag(data) == MYPY_FILE 434 tree = MypyFile([], [])
641 # This may happen for malformed overload 642 assert self.impl is not None 643 return self.impl.name
671 first_item = self.items[0] 672 assert isinstance(first_item, Decorator) 673 assert first_item.var.is_settable_property
672 assert isinstance(first_item, Decorator) 673 assert first_item.var.is_settable_property 674 assert self.setter_index is not None
673 assert first_item.var.is_settable_property 674 assert self.setter_index is not None 675 item = self.items[self.setter_index]
675 item = self.items[self.setter_index] 676 assert isinstance(item, Decorator) 677 return item
695 def deserialize(cls, data: JsonDict) -> OverloadedFuncDef: 696 assert data[".class"] == "OverloadedFuncDef" 697 res = OverloadedFuncDef(
706 typ = mypy.types.deserialize_type(data["type"]) 707 assert isinstance(typ, mypy.types.ProperType) 708 res.type = typ
736 if typ is not None: 737 assert isinstance(typ, mypy.types.ProperType) 738 res.type = typ
995 def deserialize(cls, data: JsonDict) -> FuncDef: 996 assert data[".class"] == "FuncDef" 997 body = Block([])
1133 def deserialize(cls, data: JsonDict) -> Decorator: 1134 assert data[".class"] == "Decorator" 1135 dec = Decorator(FuncDef.deserialize(data["func"]), [], Var.deserialize(data["var"]))
1146 def read(cls, data: Buffer) -> Decorator: 1147 assert read_tag(data) == FUNC_DEF 1148 func = FuncDef.read(data)
1148 func = FuncDef.read(data) 1149 assert read_tag(data) == VAR 1150 var = Var.read(data)
1309 def deserialize(cls, data: JsonDict) -> Var: 1310 assert data[".class"] == "Var" 1311 name = data["name"]
1318 v = Var(name, type) 1319 assert ( 1320 setter_type is None 1321 or isinstance(setter_type, mypy.types.ProperType) 1322 and isinstance(setter_type, mypy.types.CallableType) 1323 ) 1324 v.setter_type = setter_type
1346 if read_bool(data): 1347 assert read_tag(data) == mypy.types.CALLABLE_TYPE 1348 setter_type = mypy.types.CallableType.read(data)
1456 def deserialize(cls, data: JsonDict) -> ClassDef: 1457 assert data[".class"] == "ClassDef" 1458 res = ClassDef(
1914 super().__init__() 1915 assert len(patterns) == len(guards) == len(bodies) 1916 self.subject = subject
2150 def serialize(self) -> JsonDict:
2151 assert False, f"Serializing NameExpr: {self}"
2152
2550 ret = self.body.body[-1] 2551 assert isinstance(ret, ReturnStmt) 2552 expr = ret.expr
2552 expr = ret.expr 2553 assert expr is not None # lambda can't have empty body 2554 return expr
2892 def deserialize(cls, data: JsonDict) -> TypeVarExpr: 2893 assert data[".class"] == "TypeVarExpr" 2894 return TypeVarExpr(
2943 def deserialize(cls, data: JsonDict) -> ParamSpecExpr: 2944 assert data[".class"] == "ParamSpecExpr" 2945 return ParamSpecExpr(
3010 def deserialize(cls, data: JsonDict) -> TypeVarTupleExpr: 3011 assert data[".class"] == "TypeVarTupleExpr" 3012 return TypeVarTupleExpr(
3031 def read(cls, data: Buffer) -> TypeVarTupleExpr: 3032 assert read_tag(data) == mypy.types.INSTANCE 3033 fallback = mypy.types.Instance.read(data)
3525 if isinstance(vd, mypy.types.TypeVarTupleType): 3526 assert not self.has_type_var_tuple_type 3527 self.has_type_var_tuple_type = True
3562 members: set[str] = set() 3563 assert self.mro, "This property can be only accessed after MRO is (re-)calculated" 3564 for base in self.mro[:-1]: # we skip "object" since everyone implements it
3860 t = mypy.types.deserialize_type(p) 3861 assert isinstance(t, mypy.types.ProperType) 3862 _promote.append(t)
3949 names = SymbolTable.read(data) 3950 assert read_tag(data) == CLASS_DEF 3951 defn = ClassDef.read(data)
3961 for _ in range(read_int(data)): 3962 assert read_tag(data) == mypy.types.INSTANCE 3963 ti.bases.append(mypy.types.Instance.read(data))
3976 if read_bool(data): 3977 assert read_tag(data) == mypy.types.INSTANCE 3978 ti.alt_promote = mypy.types.Instance.read(data)
3979 if read_bool(data): 3980 assert read_tag(data) == mypy.types.INSTANCE 3981 ti.declared_metaclass = mypy.types.Instance.read(data)
3982 if read_bool(data): 3983 assert read_tag(data) == mypy.types.INSTANCE 3984 ti.metaclass_type = mypy.types.Instance.read(data)
3985 if read_bool(data): 3986 assert read_tag(data) == mypy.types.TUPLE_TYPE 3987 ti.tuple_type = mypy.types.TupleType.read(data)
3988 if read_bool(data): 3989 assert read_tag(data) == mypy.types.TYPED_DICT_TYPE 3990 ti.typeddict_type = mypy.types.TypedDictType.read(data)
3998 if read_bool(data): 3999 assert read_tag(data) == mypy.types.TYPE_VAR_TYPE 4000 ti.self_type = mypy.types.TypeVarType.read(data)
4186 """ 4187 assert info.tuple_type 4188 # TODO: is it possible to refactor this to set the correct type vars here?
4207 """ 4208 assert info.typeddict_type 4209 # TODO: is it possible to refactor this to set the correct type vars here?
4252 def deserialize(cls, data: JsonDict) -> TypeAlias: 4253 assert data[".class"] == "TypeAlias" 4254 fullname = data["fullname"]
4255 alias_tvars = [mypy.types.deserialize_type(v) for v in data["alias_tvars"]] 4256 assert all(isinstance(t, mypy.types.TypeVarLikeType) for t in alias_tvars) 4257 target = mypy.types.deserialize_type(data["target"])
4369 def serialize(self) -> JsonDict: 4370 assert False, "PlaceholderNode can't be serialized" 4371
4520 else:
4521 assert self.node is not None, f"{prefix}:{name}"
4522 if prefix is not None:
4528 ):
4529 assert not isinstance(
4530 self.node, PlaceholderNode
4531 ), f"Definition of {fullname} is unexpectedly incomplete"
4532 data["cross_ref"] = fullname
4538 def deserialize(cls, data: JsonDict) -> SymbolTableNode: 4539 assert data[".class"] == "SymbolTableNode" 4540 kind = inverse_node_kinds[data["kind"]]
4545 else: 4546 assert "node" in data, data 4547 node = SymbolNode.deserialize(data["node"])
4569 else:
4570 assert self.node is not None, f"{prefix}:{name}"
4571 if prefix is not None:
4577 ):
4578 assert not isinstance(
4579 self.node, PlaceholderNode
4580 ), f"Definition of {fullname} is unexpectedly incomplete"
4581 cross_ref = fullname
4584 if cross_ref is None: 4585 assert self.node is not None 4586 self.node.write(data)
4645 def deserialize(cls, data: JsonDict) -> SymbolTable: 4646 assert data[".class"] == "SymbolTable" 4647 st = SymbolTable()
4914 return mypy.nodes.TypeVarTupleExpr.read(data)
4915 assert False, f"Unknown symbol tag {tag}"
4916
4923 return FuncDef.read(data)
4924 assert False, f"Invalid tag for an OverloadPart {tag}"
577 self.build_per_module_cache() 578 assert self._per_module_cache is not None 579
107 def record_definition(self, name: str) -> None: 108 assert len(self.branches) > 0 109 self.branches[-1].must_be_defined.add(name)
112 def delete_var(self, name: str) -> None: 113 assert len(self.branches) > 0 114 self.branches[-1].must_be_defined.discard(name)
117 def record_nested_branch(self, state: BranchState) -> None: 118 assert len(self.branches) > 0 119 current_branch = self.branches[-1]
127 def skip_branch(self) -> None: 128 assert len(self.branches) > 0 129 self.branches[-1].skipped = True
131 def is_possibly_undefined(self, name: str) -> bool: 132 assert len(self.branches) > 0 133 return name in self.branches[-1].may_be_defined
135 def is_undefined(self, name: str) -> bool: 136 assert len(self.branches) > 0 137 branch = self.branches[-1]
140 def is_defined_in_a_branch(self, name: str) -> bool: 141 assert len(self.branches) > 0 142 for b in self.branches:
216 def _scope(self) -> Scope: 217 assert len(self.scopes) > 0 218 return self.scopes[-1]
220 def enter_scope(self, scope_type: ScopeType) -> None: 221 assert len(self._scope().branch_stmts) > 0 222 initial_state = None
234 def start_branch_statement(self) -> None: 235 assert len(self._scope().branch_stmts) > 0 236 self._scope().branch_stmts.append(
240 def next_branch(self) -> None: 241 assert len(self._scope().branch_stmts) > 1 242 self._scope().branch_stmts[-1].next_branch()
244 def end_branch_statement(self) -> None: 245 assert len(self._scope().branch_stmts) > 1 246 result = self._scope().branch_stmts.pop().done()
254 def record_definition(self, name: str) -> None: 255 assert len(self.scopes) > 0 256 assert len(self.scopes[-1].branch_stmts) > 0
255 assert len(self.scopes) > 0 256 assert len(self.scopes[-1].branch_stmts) > 0 257 self._scope().branch_stmts[-1].record_definition(name)
259 def delete_var(self, name: str) -> None: 260 assert len(self.scopes) > 0 261 assert len(self.scopes[-1].branch_stmts) > 0
260 assert len(self.scopes) > 0 261 assert len(self.scopes[-1].branch_stmts) > 0 262 self._scope().branch_stmts[-1].delete_var(name)
265 """Records an undefined reference. These can later be retrieved via `pop_undefined_ref`.""" 266 assert len(self.scopes) > 0 267 self._scope().record_undefined_ref(o)
270 """If name has previously been reported as undefined, the NameExpr that was called will be returned.""" 271 assert len(self.scopes) > 0 272 return self._scope().pop_undefined_ref(name)
274 def is_possibly_undefined(self, name: str) -> bool: 275 assert len(self._scope().branch_stmts) > 0 276 # A variable is undefined if it's in a set of `may_be_defined` but not in `must_be_defined`.
280 """This will return true if a variable is defined in a branch that's not the current branch.""" 281 assert len(self._scope().branch_stmts) > 0 282 stmt = self._scope().branch_stmts[-1]
290 def is_undefined(self, name: str) -> bool: 291 assert len(self._scope().branch_stmts) > 0 292 return self._scope().branch_stmts[-1].is_undefined(name)
330 if builtins_mod: 331 assert isinstance(builtins_mod.node, MypyFile) 332 self.builtins = builtins_mod.node.names
565 if len(o.handlers) > 0: 566 assert len(o.handlers) == len(o.vars) == len(o.types) 567 for i in range(len(o.handlers)):
117 super().__init__() 118 assert len(keys) == len(values) 119 self.keys = keys
142 super().__init__() 143 assert len(keyword_keys) == len(keyword_values) 144 self.class_ref = class_ref
545 def lookup_fully_qualified(self, fullname: str) -> SymbolTableNode | None: 546 assert self._modules is not None 547 return lookup_fully_qualified(fullname, self._modules)
135 """Return this attribute as an argument to __init__.""" 136 assert self.init 137 init_type: Type | None = None
173 node = self.info[self.name].node 174 assert node is not None 175 ctx.api.msg.need_annotation_for_var(node, self.context)
460 continue 461 assert isinstance(node, Var), node 462 node.is_initialized_in_class = False
631 # auto_attribs requires an annotation on *every* attr.ib. 632 assert lhs.node is not None 633 ctx.api.msg.need_annotation_for_var(lhs.node, stmt)
1179 ret_type = cls.names[MAGIC_ATTR_NAME].type 1180 assert ret_type is not None 1181 return ctx.default_signature.copy_modified(arg_types=arg_types, ret_type=ret_type)
89 return None 90 assert argument.name 91
264 """Adds a new overloaded method to a class definition.""" 265 assert len(items) >= 2, "Overloads must contain at least two cases" 266
306 info = cls.info 307 assert info 308
334 335 assert not ( 336 is_classmethod is True and is_staticmethod is True 337 ), "Can't add a new method that's both staticmethod and classmethod." 338
355 for arg in args: 356 assert arg.type_annotation, "All arguments must be fully typed." 357 arg_types.append(arg.type_annotation)
37 ) 38 assert len(simplecdata_base.args) == 1, "_SimpleCData takes exactly one type argument" 39 return get_proper_type(simplecdata_base.args[0])
93 # the auto-unboxed type is the single type argument of the _SimpleCData type. 94 assert len(base.args) == 1 95 return get_proper_type(base.args[0])
104 if isinstance(tp, Instance): 105 assert tp.type.fullname == "_ctypes.Array" 106 if len(tp.args) == 1:
117 allowed = _autoconvertible_to_cdata(et, ctx.api) 118 assert ( 119 len(ctx.arg_types) == 1 120 ), "The stub of the ctypes.Array constructor should have a single vararg parameter" 121 for arg_num, (arg_kind, arg_type) in enumerate(zip(ctx.arg_kinds[0], ctx.arg_types[0]), 1):
153 unboxed = _autounboxed_cdata(et) 154 assert ( 155 len(ctx.arg_types) == 1 156 ), "The stub of ctypes.Array.__getitem__ should have exactly one parameter" 157 assert (
156 ), "The stub of ctypes.Array.__getitem__ should have exactly one parameter" 157 assert ( 158 len(ctx.arg_types[0]) == 1 159 ), "ctypes.Array.__getitem__'s parameter should not be variadic" 160 index_type = get_proper_type(ctx.arg_types[0][0])
173 allowed = _autoconvertible_to_cdata(et, ctx.api) 174 assert len(ctx.default_signature.arg_types) == 2 175 index_type = get_proper_type(ctx.default_signature.arg_types[0])
176 def serialize(self) -> JsonDict:
177 assert self.type
178 return {
333 if existing_method is not None and not existing_method.plugin_generated: 334 assert existing_method.node 335 self._api.fail(
497 # Nodes of superclass InitVars not used in __init__ cannot be reached. 498 assert attr.is_init_var 499 for stmt in info.defn.defs.body:
595 node = sym.node 596 assert not isinstance(node, PlaceholderNode) 597
611 612 assert isinstance(node, Var), node 613
860 return True, {}
861 assert name is not None
862 args[name] = arg
926 super_info = t.type.get_containing_type_info("__set__")
927 assert super_info
928 if setter.type:
987 spec = find_dataclass_transform_spec(reason) 988 assert spec is not None, ( 989 "trying to find dataclass transform spec, but reason is neither dataclasses.dataclass nor " 990 "decorated with typing.dataclass_transform" 991 ) 992 return spec
1036 replace_sig = replace_sym.type 1037 assert isinstance(replace_sig, ProperType) 1038 assert isinstance(replace_sig, CallableType)
1037 assert isinstance(replace_sig, ProperType) 1038 assert isinstance(replace_sig, CallableType) 1039 return [expand_type_by_instance(replace_sig, typ)]
1114 return 1115 assert isinstance(defn.type, FunctionLike) 1116
1117 ideal_sig_method = info.get_method(_INTERNAL_POST_INIT_SYM_NAME) 1118 assert ideal_sig_method is not None and ideal_sig_method.type is not None 1119 ideal_sig = ideal_sig_method.type
1119 ideal_sig = ideal_sig_method.type 1120 assert isinstance(ideal_sig, ProperType) # we set it ourselves 1121 assert isinstance(ideal_sig, CallableType)
1120 assert isinstance(ideal_sig, ProperType) # we set it ourselves 1121 assert isinstance(ideal_sig, CallableType) 1122 ideal_sig = ideal_sig.copy_modified(name="__post_init__")
246 tv = signature.variables[0] 247 assert isinstance(tv, TypeVarType) 248 return signature.copy_modified(
317 tv = signature.variables[0] 318 assert isinstance(tv, TypeVarType) 319 typ = make_simplified_union([value_type, tv])
100 return proper_type 101 assert isinstance(ctx.type, Instance), "An incorrect ctx.type was passed." 102 info = ctx.type.type
124 def _is_defined_in_stub(ctx: mypy.plugin.AttributeContext) -> bool: 125 assert isinstance(ctx.api, TypeCheckerSharedApi) 126 return isinstance(ctx.type, Instance) and ctx.api.is_defined_in_stub(ctx.type)
252 253 assert isinstance(ctx.type, Instance) 254 info = ctx.type.type
297 # as a string. 298 assert isinstance(underlying_literal.value, str) 299 return underlying_literal.value
291 else: 292 assert actuals 293 if any(actual_arg_kinds[j] in (ArgKind.ARG_POS, ArgKind.ARG_STAR) for j in actuals):
322 if partially_applied.param_spec(): 323 assert ret.extra_attrs is not None # copy_with_extra_attr above ensures this 324 attrs = ret.extra_attrs.copy()
168 checker = ctx.api 169 assert isinstance(checker, TypeChecker) 170 types = checker.modules["mypy.types"]
171 proper_type_info = types.names["ProperType"] 172 assert isinstance(proper_type_info.node, TypeInfo) 173 return Instance(proper_type_info.node, [])
109 singledispatch_obj = get_proper_type(ctx.default_return_type) 110 assert isinstance(singledispatch_obj, Instance) 111 singledispatch_obj.args += (func_type,)
117 """Called for functools._SingleDispatchCallable.register""" 118 assert isinstance(ctx.type, Instance) 119 # TODO: check that there's only one argument
28 try: 29 from lxml import etree # type: ignore[import-untyped] 30
255 row_len = len(header) 256 assert all(len(row) == row_len for row in rows + [header, footer]) 257 min_column_distance = 3 # minimum distance between numbers in two columns
473 xsd_path = os.path.join(reports.data_dir, "xml", "mypy.xsd") 474 self.schema = etree.XMLSchema(etree.parse(xsd_path)) 475 self.last_xml: Any | None = None
713 memory_reporter = reports.add_report("memory-xml", "<memory>")
714 assert isinstance(memory_reporter, MemoryXmlReporter)
715 # The dependency will be called first.
747 last_xml = self.memory_xml.last_xml 748 assert last_xml is not None 749 out_path = os.path.join(self.output_dir, "index.xml")
770
771 self.xslt_html = etree.XSLT(etree.parse(self.memory_xml.xslt_html_path))
772 self.param_html = etree.XSLT.strparam("html")
794 last_xml = self.memory_xml.last_xml 795 assert last_xml is not None 796 out_path = os.path.join(self.output_dir, "index.html")
816 817 self.xslt_txt = etree.XSLT(etree.parse(self.memory_xml.xslt_txt_path)) 818
829 last_xml = self.memory_xml.last_xml 830 assert last_xml is not None 831 out_path = os.path.join(self.output_dir, "index.txt")
926 # module is slow to import. Ensure that the two definitions match. 927 assert set(reporter_classes) == set(REPORTER_NAMES)
29 def current_module_id(self) -> str: 30 assert self.module 31 return self.module
34 """Return the current target (non-class; for a class return enclosing module).""" 35 assert self.module 36 if self.function:
42 """Return the current target (may be a class).""" 43 assert self.module 44 if self.function:
64 yield 65 assert self.module 66 self.module = None
81 else: 82 assert self.function 83 self.function = None
101 else: 102 assert self.classes 103 # Leave the innermost class.
113 """Produce a saved scope that can be entered with saved_scope()""" 114 assert self.module 115 # We only save the innermost class, which is sufficient since
614 bool_info = names["bool"].node 615 assert isinstance(bool_info, TypeInfo) 616 bool_type = Instance(bool_info, [])
700 if inst is None: 701 assert not self.final_iteration, "Cannot find builtins.list to add __path__" 702 self.defer()
709 if inst is None: 710 assert ( 711 not self.final_iteration 712 ), "Cannot find builtins.dict to add __annotations__" 713 self.defer()
723 inst = self.named_type_or_none("builtins.object")
724 assert inst is not None, "Cannot find builtins.object"
725 else:
732 else:
733 assert t is not None, f"type should be specified for {name}"
734 typ = UnboundType(t)
761 """ 762 assert tree.fullname == "typing" 763 for alias, target_name in type_aliases.items():
781 """ 782 assert tree.fullname == "typing_extensions" 783
803 target = self.named_type_or_none(target_name, []) 804 assert target is not None 805 # Transform List to List[Any], etc.
824 if name in tree.names: 825 assert isinstance(tree.names[name].node, PlaceholderNode) 826 del tree.names[name]
940 if defn.type: 941 assert isinstance(defn.type, CallableType) 942 has_self_type = self.update_function_type_variables(defn.type, defn)
949 # Method definition 950 assert self.type is not None 951 defn.info = self.type
952 if defn.type is not None and defn.name in ("__init__", "__init_subclass__"):
953 assert isinstance(defn.type, CallableType)
954 if isinstance(get_proper_type(defn.type.ret_type), AnyType):
962 self.check_classvar_in_signature(defn.type) 963 assert isinstance(defn.type, CallableType) 964 # Signature must be analyzed in the surrounding scope so that
973 return 974 assert isinstance(result, ProperType) 975 if isinstance(result, CallableType):
1002 if isinstance(defn, FuncDef): 1003 assert isinstance(defn.type, CallableType) 1004 defn.type = set_callable_name(defn.type, defn)
1009 if self.is_class_scope(): 1010 assert self.type is not None 1011 # Mark protocol methods with empty bodies as implicitly abstract.
1042 ) 1043 assert ret_type is not None, "Internal error: typing.Coroutine not found" 1044 defn.type = defn.type.copy_modified(ret_type=ret_type)
1084 if has_self_type: 1085 assert self.type is not None and self.type.self_type is not None 1086 leading_type: Type = self.type.self_type
1113 """Does this (analyzed or not) type represent the expected Self type for a method?""" 1114 assert self.type is not None 1115 typ = get_proper_type(typ)
1183 """ 1184 assert self.type is not None 1185 info = self.type
1250 typ = function_type(first_item.func, self.function_type()) 1251 assert isinstance(typ, CallableType) 1252 typ.definition = first_item
1266 if impl is not None: 1267 assert impl is defn.items[-1] 1268 defn.items = defn.items[:-1]
1356 if is_trivial_body(impl.body) and self.is_class_scope() and not self.is_stub_file: 1357 assert self.type is not None 1358 if self.type.is_protocol:
1382 callable = function_type(item.func, self.function_type()) 1383 assert isinstance(callable, CallableType) 1384 callable.definition = item
1458 if defn.items: 1459 assert isinstance(defn.items[0], Decorator) 1460 defn.items[0].func.is_trivial_body = True
1495 else:
1496 assert False, f"The 'item' variable is an unexpected type: {type(item)}"
1497 class_status.append(inner.is_class)
1505 else:
1506 assert False, f"Unexpected impl type: {type(defn.impl)}"
1507 class_status.append(inner.is_class)
1529 first_item = defn.items[0] 1530 assert isinstance(first_item, Decorator) 1531 deleted_items = []
1545 setter_func_type = function_type(item.func, self.function_type()) 1546 assert isinstance(setter_func_type, CallableType) 1547 bare_setter_type = setter_func_type
1589 if self.is_class_scope(): 1590 assert self.type is not None 1591 func.info = self.type
1610 typ = defn.type 1611 assert isinstance(typ, CallableType) 1612 a.bind_function_type_variables(typ, defn)
1647 sig = fdef.type 1648 assert isinstance(sig, CallableType) 1649 if len(sig.arg_types) < len(fdef.arguments):
1717 if self.is_class_scope(): 1718 assert self.type is not None, "No type set at class scope" 1719 if self.type.is_protocol:
1878 else:
1879 assert type_param.kind == TYPE_VAR_TUPLE_KIND
1880 tuple_fallback = self.named_type("builtins.tuple", [self.object_type()])
2012 def setup_alias_type_vars(self, defn: ClassDef) -> None: 2013 assert defn.info.special_alias is not None 2014 defn.info.special_alias.alias_tvars = list(defn.type_vars)
2020 target = defn.info.special_alias.target 2021 assert isinstance(target, ProperType) 2022 if isinstance(target, TypedDictType):
2026 else:
2027 assert False, f"Unexpected special alias type: {type(target)}"
2028
2221 node = self.lookup(p.name, context) 2222 assert node is not None 2223 assert isinstance(node.node, TypeVarLikeExpr)
2222 assert node is not None 2223 assert isinstance(node.node, TypeVarLikeExpr) 2224 declared_tvars.append((p.name, node.node))
2343 ) -> tuple[str, TypeVarLikeExpr] | None: 2344 assert not is_unpacked or not is_typealias_param, "Mutually exclusive conditions" 2345 sym = self.lookup_qualified(t.name, t)
2363 else: 2364 assert isinstance(sym.node, TypeVarExpr) 2365 return t.name, sym.node
2494 info = self.globals[defn.name].node 2495 assert isinstance(info, TypeInfo) 2496 else:
2825 inst = fill_typevars(metaclass_info) 2826 assert isinstance(inst, Instance) 2827 declared_metaclass = inst
3490 lvalue = s.lvalues[0] 3491 assert isinstance(lvalue, NameExpr) 3492 lvalue.is_special_form = True
3587 # We need to exclude bare Final. 3588 assert isinstance(s.unanalyzed_type, UnboundType) 3589 if not s.unanalyzed_type.args:
3656 return False 3657 assert isinstance(s.unanalyzed_type, UnboundType) 3658 if len(s.unanalyzed_type.args) > 1:
3685 lval = s.lvalues[0] 3686 assert isinstance(lval, RefExpr) 3687
3716 lval = s.lvalues[0] 3717 assert isinstance(lval, RefExpr) 3718 if isinstance(lval, MemberExpr):
3723 else: 3724 assert self.function_stack 3725 if self.function_stack[-1].name != "__init__":
3774 if isinstance(lval, MemberExpr) and self.is_self_member_ref(lval): 3775 assert self.type, "Self member outside a class" 3776 cur_node = self.type.names.get(lval.name, None)
3777 if cur_node and isinstance(cur_node.node, Var) and cur_node.node.is_final: 3778 assert self.function_stack 3779 current_function = self.function_stack[-1]
4158 current_node = existing.node if existing else alias_node 4159 assert isinstance(current_node, TypeAlias) 4160 self.disable_invalid_recursive_aliases(s, current_node, s.rvalue)
4161 if self.is_class_scope(): 4162 assert self.type is not None 4163 if self.type.is_protocol:
4309 if escape_comprehensions: 4310 assert isinstance(lval, NameExpr), "assignment expression target must be NameExpr" 4311 if isinstance(lval, NameExpr):
4429 name = unmangle(name) 4430 assert self.locals[-1] is not None, "No locals at function scope" 4431 existing = self.locals[-1].get(name)
4459 if kind == MDEF: 4460 assert self.type is not None 4461 v.info = self.type
4535 if self.is_self_member_ref(lval): 4536 assert self.type, "Self member outside a class" 4537 cur_node = self.type.names.get(lval.name)
4702 else: 4703 assert isinstance(call.analyzed, TypeVarExpr) 4704 updated = (
4758 name = unmangle(name) 4759 assert isinstance(call.callee, RefExpr) 4760 typevarlike_type = (
4916 lvalue = s.lvalues[0] 4917 assert isinstance(lvalue, NameExpr) 4918 if s.type:
4984 else: 4985 assert isinstance(call.analyzed, ParamSpecExpr) 4986 updated = default != call.analyzed.default
5047 else: 5048 assert isinstance(call.analyzed, TypeVarTupleExpr) 5049 updated = default != call.analyzed.default
5109 return 5110 assert isinstance(s.type, UnboundType) 5111 if self.is_class_scope() and isinstance(lvalue, NameExpr):
5115 analyzed = self.anal_type(s.type) 5116 assert self.type is not None 5117 if (
5206 elif isinstance(lval, MemberExpr) and self.is_self_member_ref(lval): 5207 assert self.type is not None 5208 lnode = self.type.names.get(lval.name)
5213 if isinstance(lnode.node, MypyFile) and lnode.node is not rnode.node: 5214 assert isinstance(lval, (NameExpr, MemberExpr)) 5215 self.fail(
5221 elif lval.is_inferred_def: 5222 assert rnode.node is not None 5223 lnode.node = rnode.node
5255 attrs.append(item.value) 5256 assert self.type 5257 self.type.deletable_attributes = attrs
5313 for super_type in self.type.mro[1:-1]: 5314 assert super_type.slots is not None 5315 slots.extend(super_type.slots)
5457 if s.unanalyzed_type: 5458 assert isinstance(s.unanalyzed_type, ProperType) 5459 actual_targets = [t for t in s.target if t is not None]
5669 current_node = existing.node if existing else alias_node 5670 assert isinstance(current_node, TypeAlias) 5671 self.disable_invalid_recursive_aliases(s, current_node, s.value)
5966 elif isinstance(base.node, TypeAlias) and base.node.no_args: 5967 assert isinstance(base.node.target, ProperType) 5968 if isinstance(base.node.target, Instance):
6212 if analyzed is not None: 6213 assert isinstance(analyzed, ProperType), "Cannot use type aliases for promotions" 6214 expr.type = analyzed
6344 if b: 6345 assert isinstance(b.node, MypyFile) 6346 table = b.node.names
6411 """ 6412 assert self.statement 6413 line_diff = self.statement.line - node.line
6426 """Similar to above, but check if a node is defined before current class.""" 6427 assert self.type is not None 6428 if node is None:
6479 elif isinstance(node, TypeAlias) and node.no_args: 6480 assert isinstance(node.target, ProperType) 6481 if isinstance(node.target, Instance):
6590 ret = self.lookup_fully_qualified_or_none(fullname) 6591 assert ret is not None, fullname 6592 return ret
6661 sym = self.lookup_fully_qualified(fullname) 6662 assert sym, "Internal error: attempted to construct unknown type" 6663 node = sym.node
6663 node = sym.node 6664 assert isinstance(node, TypeInfo), node 6665 if args:
6675 if isinstance(node, TypeAlias): 6676 assert isinstance(node.target, Instance) # type: ignore[misc] 6677 node = node.target.type
6677 node = node.target.type 6678 assert isinstance(node, TypeInfo), node 6679 if args is not None:
6854 """Add local variable or function.""" 6855 assert self.is_func_scope() 6856 name = node.name
6895 symbol_node._fullname = self.qualified_name(name) 6896 assert self.type is not None # guaranteed by is_class_scope 6897 symbol_node.info = self.type
6910 """Add an alias to an existing symbol through import.""" 6911 assert not module_hidden or not module_public 6912
6995 """ 6996 assert not self.final_iteration, "Must not defer during final iteration" 6997 if force_progress:
7046 fullname = self.qualified_name(name) 7047 assert self.statement 7048 placeholder = PlaceholderNode(
7153 n = self.locals[-1] 7154 assert n is not None 7155 return n
7160 n = self.locals[-1] 7161 assert n is not None 7162 if escape_comprehensions:
7162 if escape_comprehensions: 7163 assert len(self.locals) == len(self.scope_stack) 7164 # Retrieve the symbol table from the enclosing non-comprehension scope.
7171 names_candidate = self.locals[-1 - i] 7172 assert ( 7173 names_candidate is not None 7174 ), "Escaping comprehension from invalid scope" 7175 names = names_candidate
7177 else: 7178 assert False, "Should have at least one non-comprehension scope" 7179 else:
7180 names = n 7181 assert names is not None 7182 elif self.type is not None:
7317 # In case it's a bug and we don't really have context 7318 assert ctx is not None, msg 7319 if isinstance(msg, ErrorMessage):
7381 return None 7382 assert info.tuple_type, "NamedTuple without tuple type" 7383 fallback = Instance(info, [])
7672 else: 7673 assert False 7674
176 target_info = target_sym.node 177 assert isinstance(target_info, TypeInfo) 178 promote_targets.append(Instance(target_info, []))
183 int_sym = builtin_names["int"] 184 assert isinstance(int_sym.node, TypeInfo) 185 int_sym.node._promote.append(Instance(defn.info, []))
138 base = self.api.named_type_or_none(fullname) 139 assert base is not None 140 info = self.api.basic_new_typeinfo(name, base, line)
207 for seq_item in seq_items: 208 assert isinstance(seq_item, (TupleExpr, ListExpr)) 209 name, value = seq_item.items
209 name, value = seq_item.items 210 assert isinstance(name, StrExpr) 211 items.append(name.value)
256 values = [None] * len(items) 257 assert len(items) == len(values) 258 return new_class_name, items, values, True
118 """ 119 assert state.tree is not None 120 remove_imported_names_from_symtable(state.tree.names, "builtins")
160 defined_in_this_class = name in info.names 161 assert isinstance(sym.node, Var) 162 # This needs to mimic the logic in SemanticAnalyzer.analyze_member_lvalue()
188 state = graph[id] 189 assert state.tree is not None 190 state.manager.semantic_analyzer.prepare_file(state.tree)
208 # Just pick some module inside the current SCC for error context. 209 assert state.tree is not None 210 with analyzer.file_context(state.tree, state.options):
220 state = graph[next_id] 221 assert state.tree is not None 222 deferred, incomplete, progress = semantic_analyze_target(
229 if final_iteration: 230 assert not all_deferred, "Must not defer during final iteration" 231 # Reverse to process the targets in the same order on every iteration. This avoids
277 tree = graph[module].tree 278 assert tree is not None 279 # In principle, functions can be processed in arbitrary order,
292 analyzer = graph[module].manager.semantic_analyzer 293 assert isinstance(node, (FuncDef, OverloadedFuncDef, Decorator)), node 294 process_top_level_function(
325 # Just pick some module inside the current SCC for error context. 326 assert state.tree is not None 327 with analyzer.file_context(state.tree, state.options):
338 if final_iteration: 339 assert not deferred, "Must not defer during final iteration" 340 if not progress:
385 tree = state.tree 386 assert tree is not None 387 analyzer = state.manager.semantic_analyzer
428 state = graph[module] 429 assert state.tree 430 analyzer = TypeArgumentAnalyzer(
481 while incomplete: 482 assert num_passes < 10, "Internal error: too many class plugin hook passes" 483 num_passes += 1
487 tree = state.tree 488 assert tree 489 for _, node, _ in tree.local_definitions():
542 builtins = graph["builtins"].tree 543 assert builtins 544 for module in scc:
546 tree = state.tree 547 assert tree 548 for _, node, _ in tree.local_definitions():
573 574 assert info.tuple_type is not None # Set by update_tuple_type() above. 575 shared_self_type = TypeVarType(
606 arg_kinds = [arg.kind for arg in args] 607 assert None not in types 608 signature = CallableType(cast(list[Type], types), arg_kinds, items, ret, function_type)
685 ctx = named_tuple_info.names[prohibited].node
686 assert ctx is not None
687 self.fail(f'Cannot overwrite NamedTuple attribute "{prohibited}"', ctx)
92 # Create the corresponding class definition if the aliased type is subtypeable 93 assert isinstance(call.analyzed, NewTypeExpr) 94 if isinstance(old_type, TupleType):
128 # If so, add it to the symbol table. 129 assert isinstance(call.analyzed, NewTypeExpr) 130 # As we do for normal classes, create the TypeInfo only once, then just
258 previous_sym = info.names["__init__"].node 259 assert isinstance(previous_sym, FuncDef) 260 updated = old_type != previous_sym.arguments[1].variable.type
291 fallback = typ.partial_fallback 292 assert fallback.type.fullname == "builtins.tuple" 293 items = []
91 self.seen_aliases.add(t)
92 assert t.alias is not None, f"Unfixed type alias {t.type_ref}"
93 is_error, is_invalid = self.validate_args(
134 if isinstance(unpacked, Instance): 135 assert unpacked.type.fullname == "builtins.tuple" 136 t.args = unpacked.args
143 tvt = type_vars[prefix] 144 assert isinstance(tvt, TypeVarTupleType) 145 start, middle, end = split_with_prefix_and_suffix(
197 198 assert info.typeddict_type is not None 199 base_typed_dict = info.typeddict_type
229 target = get_proper_type(base.node.target) 230 assert isinstance(target, TypedDictType) 231 return target.fallback.type
232 else: 233 assert False 234 elif isinstance(base, IndexExpr):
234 elif isinstance(base, IndexExpr): 235 assert isinstance(base.base, RefExpr) 236 return self._parse_typeddict_base(base.base, ctx)
237 else: 238 assert isinstance(base, CallExpr) 239 assert isinstance(base.analyzed, TypedDictExpr)
238 assert isinstance(base, CallExpr) 239 assert isinstance(base.analyzed, TypedDictExpr) 240 return base.analyzed.info
542 items, types, ok = res 543 assert total is not None 544 return args[0].value, items, types, total, tvar_defs, ok
606 ) 607 assert fallback is not None 608 info = existing_info or self.api.basic_new_typeinfo(name, fallback, line)
155 # Look for differences in nested class symbol table entries. 156 assert isinstance(item1[-1], dict) 157 assert isinstance(item2[-1], dict)
156 assert isinstance(item1[-1], dict) 157 assert isinstance(item2[-1], dict) 158 triggers |= compare_symbol_table_snapshots(item_name, item1[-1], item2[-1])
219 else: 220 assert symbol.kind != UNBOUND_IMPORTED 221 if node and get_prefix(node.fullname) != name_prefix:
330 # Other node types are handled elsewhere. 331 assert False, type(node) 332
485 else: 486 assert isinstance(v, ParamSpecType) 487 tv = v.copy_modified(id=tid)
523 def visit_type_alias_type(self, typ: TypeAliasType) -> SnapshotItem:
524 assert typ.alias is not None
525 return ("TypeAliasType", typ.alias.fullname, snapshot_types(typ.args))
128 """ 129 assert new.fullname == old.fullname 130 # Find the mapping from new to old node identities for all nodes
139 node = replace_nodes_in_ast(new, replacement_map) 140 assert node is old 141 # Also replace AST node references in the *new* symbol table (we'll
365 new = self.replacements[node] 366 assert isinstance(new, TypeInfo) 367 type_state.reset_all_subtype_caches_for(new)
428 def visit_type_alias_type(self, typ: TypeAliasType) -> None: 429 assert typ.alias is not None 430 typ.alias = self.fixup(typ.alias)
166 # See also #4814. 167 assert isinstance(node.type, CallableType) 168 node.type.variables = ()
250 # self, since only those can define new attributes. 251 assert self.type is not None 252 if lvalue.name in self.type.names:
456 info = rvalue.analyzed.info
457 assert info.typeddict_type is not None
458 prefix = f"{self.scope.current_full_target()}.{info.name}"
465 elif o.is_alias_def: 466 assert len(o.lvalues) == 1 467 lvalue = o.lvalues[0]
467 lvalue = o.lvalues[0] 468 assert isinstance(lvalue, NameExpr) 469 typ = get_proper_type(self.type_map.get(lvalue))
578 # differently, in the semantic analyzer.) 579 assert not lvalue.is_new_def 580 return UninhabitedType()
976 self.seen_aliases.add(typ) 977 assert typ.alias is not None 978 trigger = make_trigger(typ.alias.fullname)
1011 # This type should exist only temporarily during type inference 1012 assert False, "Should not see an erased type here" 1013
1017 def visit_partial_type(self, typ: PartialType) -> list[str]: 1018 assert False, "Should not see a partial type here" 1019
1127 continue 1128 assert id == node.fullname 1129 deps = get_dependencies(node, type_map, python_version, options)
65 66 assert sym.fullname not in m 67
4 def trigger_to_target(s: str) -> str: 5 assert s[0] == "<" 6 # Strip off the angle brackets
439 return remaining, (module, path), errors 440 assert isinstance(result, NormalUpdate) # Work around #4124 441 module, path, remaining, tree = result
621 # Parse error somewhere in the program -- a blocker 622 assert err.module_with_blocker 623 restore([module] + [st.id for st in new_modules])
651 state.parse_file() 652 assert state.tree is not None, "file must be at least parsed" 653 t0 = time.time()
696 """ 697 assert modules 698 # Sort for repeatable results.
882 for id, nodes in sorted(todo.items(), key=lambda x: x[0]): 883 assert id not in up_to_date_modules 884 triggered |= reprocess_nodes(manager, graph, id, nodes, deps, processed_targets)
1085 tree = graph[module_id].tree 1086 assert tree is not None, "Tree must be processed at this stage" 1087 new_deps = get_dependencies_of_target(
1140 # bodies as separate entities for simplicity. 1141 assert file is not None 1142 if node.fullname != target:
1271 # that we need to propagate. 1272 assert state.path is not None 1273 messages = refresh_file(state.id, state.path)
1274 tree = state.tree 1275 assert tree # Will be fine, due to refresh_file() above 1276 for imp in tree.imports:
101 102 assert not (_NON_BINARY_MAGIC_METHODS & BINARY_MAGIC_METHODS) 103
310 source_any = top if isinstance(p_top, AnyType) else bottom 311 assert isinstance(source_any, ProperType) and isinstance(source_any, AnyType) 312 return AnyType(TypeOfAny.from_another_any, source_any=source_any)
436 if is_linear and target_id in tvars: 437 assert target_id is not None 438 if c.op == SUBTYPE_OF:
464 else: 465 assert c.op == SUPERTYPE_OF 466 if c.target in lowers[c.type_var]:
156 for defn in o.expanded: 157 assert isinstance(defn, FuncDef) 158 self.visit_func_def(defn)
160 if o.type: 161 assert isinstance(o.type, CallableType) 162 sig = o.type
280 """ 281 assert self.typemap 282 typemap = self.typemap
490 if t.type_of_any == TypeOfAny.from_another_any: 491 assert t.source_any 492 assert t.source_any.type_of_any != TypeOfAny.from_another_any
491 assert t.source_any 492 assert t.source_any.type_of_any != TypeOfAny.from_another_any 493 t = t.source_any
65 if self.show_ids:
66 assert self.id_mapper is not None
67 tag += f"<{self.get_id(obj)}>"
82 elif kind.is_optional():
83 assert arg.initializer is not None
84 args.append(("default", [arg.variable, arg.initializer]))
371 # This is currently only used for TypedDict where all keys are strings.
372 assert isinstance(key, StrExpr)
373 dict_items.append(f"{key.accept(self)}: {value.accept(self)}")
902 nt_fields = self._get_namedtuple_fields(base) 903 assert isinstance(base.args[0], StrExpr) 904 typename = base.args[0].value
1737 """ 1738 assert mod.path is not None, "Not found module was not skipped" 1739 with open(mod.path, "rb") as f:
1816 ) 1817 assert mod.ast is not None, "This function must be used only with analyzed modules" 1818 mod.ast.accept(gen)
1838 for mod in py_modules + pyc_modules:
1839 assert mod.path is not None, "Not found module was not skipped"
1840 target = mod.module.replace(".", "/")
112 if inferred: 113 assert ctx.docstring is not None 114 if is_pybind11_overloaded_function_docstring(ctx.docstring, ctx.name):
398 parsed_type = parse_type_comment(type_name, 0, 0, None)[1] 399 assert parsed_type is not None, type_name 400 return self.print_annotation(parsed_type, self.known_modules, local_modules)
298 # note that this includes the case the stub simply defines `__all__: list[str]`
299 assert "__all__" in stub.names
300 public_names_in_stub = {m for m, o in stub.names.items() if o.module_public}
399 obj_mod = obj.__module__ 400 except Exception: 401 pass 402 else:
428 continue 429 assert stub_entry is not None 430 try:
431 runtime_entry = getattr(runtime, entry, MISSING) 432 except Exception: 433 # Catch all exceptions in case the runtime raises an unexpected exception 434 # from __getattr__ or similar. 435 continue 436 yield from verify(stub_entry, runtime_entry, object_path + [entry])
456 ) 457 except Exception: 458 # The class probably wants its subclasses to do something special. 459 # Examples: ctypes.Array, ctypes._SimpleCData 460 pass 461
509 base = typ.__base__
510 assert base is not None, f"Type {typ} has no base"
511 return _shape_differs(typ, base)
669 stub_to_verify = next((t.names[entry].node for t in stub.mro if entry in t.names), MISSING) 670 assert stub_to_verify is not None 671 try:
675 runtime_attr = inspect.getattr_static(runtime, mangled_entry, MISSING) 676 except Exception: 677 # Catch all exceptions in case the runtime raises an unexpected exception 678 # from __getattr__ or similar. 679 continue 680
970 for func in map(_resolve_funcitem_from_decorator, stub.items): 971 assert func is not None, "Failed to resolve decorated overload" 972 args = maybe_strip_cls(stub.name, func.arguments)
986 for func in map(_resolve_funcitem_from_decorator, stub.items): 987 assert func is not None, "Failed to resolve decorated overload" 988 args = maybe_strip_cls(stub.name, func.arguments)
1418 def _verify_readonly_property(stub: nodes.Decorator, runtime: Any) -> Iterator[str]: 1419 assert stub.func.is_property 1420 if isinstance(runtime, property):
1765 ) 1766 except Exception: 1767 pass 1768
1803 ) 1804 except Exception: 1805 pass 1806
1818 sig = inspect._signature_fromstr(inspect.Signature, runtime, sig) # type: ignore[attr-defined] 1819 assert isinstance(sig, inspect.Signature) 1820 new_params = [
1896 builtins = get_stub("builtins")
1897 assert builtins is not None
1898 type_info = builtins.names["function"].node
1898 type_info = builtins.names["function"].node 1899 assert isinstance(type_info, nodes.TypeInfo) 1900 fallback = mypy.types.Instance(type_info, [anytype()])
1956 node = get_mypy_node_for_name(parts[0], parts[1]) 1957 assert isinstance(node, nodes.TypeInfo) 1958 any_type = mypy.types.AnyType(mypy.types.TypeOfAny.special_form)
2086 def exists_in_version(module: str) -> bool:
2087 assert version_info is not None
2088 parts = module.split(".")
499 # 'import {module} as {alias}'
500 assert "." not in alias # invalid syntax
501 self.module_for[alias] = None
568 elif name in self.reexports:
569 assert "." not in name # Because reexports only has nonqualified names
570 result.append(f"import {name} as {name}\n")
119 if proper_subtype: 120 assert not self.ignore_pos_arg_names and not self.ignore_declared_variance 121 else:
121 else: 122 assert not self.erase_instances and not self.keep_erased_types 123
159 else: 160 assert ( 161 not ignore_type_params 162 and not ignore_pos_arg_names 163 and not ignore_declared_variance 164 and not always_covariant 165 and not ignore_promotions 166 and options is None 167 ), "Don't pass both context and individual flags" 168 if type_state.is_assumed_subtype(left, right):
219 else: 220 assert ( 221 not ignore_promotions and not erase_instances and not keep_erased_types 222 ), "Don't pass both context and individual flags" 223 if type_state.is_assumed_proper_subtype(left, right):
548 erased = erase_type(t) 549 assert isinstance(erased, Instance) 550 t = erased
554 # all the heavy lifting is done by the tuple subtyping. 555 assert right.type.type_var_tuple_prefix is not None 556 assert right.type.type_var_tuple_suffix is not None
555 assert right.type.type_var_tuple_prefix is not None 556 assert right.type.type_var_tuple_suffix is not None 557 prefix = right.type.type_var_tuple_prefix
559 tvt = right.type.defn.type_vars[prefix] 560 assert isinstance(tvt, TypeVarTupleType) 561 fallback = tvt.tuple_fallback
740 call = find_member("__call__", right, right, is_operator=True)
741 assert call is not None
742 if self._is_subtype(left, call):
784 unpack = get_proper_type(unpack.upper_bound) 785 assert ( 786 isinstance(unpack, Instance) 787 and unpack.type.fullname == "builtins.tuple" 788 ) 789 li = unpack.args[0]
836 right_unpack = right.items[right_unpack_index] 837 assert isinstance(right_unpack, UnpackType) 838 right_unpacked = get_proper_type(right_unpack.type)
841 return False 842 assert right_unpacked.type.fullname == "builtins.tuple" 843 right_item = right_unpacked.args[0]
871 left_unpack = left.items[left_unpack_index] 872 assert isinstance(left_unpack, UnpackType) 873 left_unpacked = get_proper_type(left_unpack.type)
888 return False 889 assert left_unpacked.type.fullname == "builtins.tuple" 890 left_item = left_unpacked.args[0]
976 call = find_member("__call__", right, right, is_operator=True)
977 assert call is not None
978 if self._is_subtype(left, call):
1144 def visit_type_alias_type(self, left: TypeAliasType) -> bool:
1145 assert False, f"This should be never called, got {left}"
1146
1182 """ 1183 assert right.type.is_protocol 1184 if skip is None:
1208 supertype = find_member(member, right, left) 1209 assert supertype is not None 1210
1244 # would return False above. 1245 assert supertype is not None and subtype is not None 1246 if not is_subtype(supertype, subtype, options=options):
1374 if method.is_property: 1375 assert isinstance(method, OverloadedFuncDef) 1376 dec = method.items[0]
1376 dec = method.items[0] 1377 assert isinstance(dec, Decorator) 1378 # Pass on is_lvalue flag as this may be a property with different setter type.
1441 elif method.is_property: # this could be settable property 1442 assert isinstance(method, OverloadedFuncDef) 1443 dec = method.items[0]
1443 dec = method.items[0] 1444 assert isinstance(dec, Decorator) 1445 if dec.var.is_settable_property or setattr_meth:
1523 ): 1524 assert isinstance(p_typ, FunctionLike) 1525 if class_obj and not (
1534 if node.is_property and not class_obj: 1535 assert isinstance(signature, CallableType) 1536 if (
1554 1555 assert tp.is_protocol 1556 result: list[str] = []
1853 right_by_position = right.try_synthesizing_arg_from_vararg(None) 1854 assert right_by_position is not None 1855
1856 i = right_star.pos 1857 assert i is not None 1858 while i < len(left.arg_kinds) and left.arg_kinds[i].is_positional():
1862 left_by_position = left.argument_by_position(i) 1863 assert left_by_position is not None 1864
1892 right_by_name = right.try_synthesizing_arg_from_kwarg(None) 1893 assert right_by_name is not None 1894
1896 left_by_name = left.argument_by_name(name) 1897 assert left_by_name is not None 1898
2186 tv = info.defn.type_vars[i] 2187 assert isinstance(tv, TypeVarType) 2188 if tv.variance != VARIANCE_NOT_READY:
2296 return False 2297 assert unpacked.type.fullname == "builtins.tuple" 2298 if not isinstance(get_proper_type(unpacked.args[0]), AnyType):
707 """Recheck the module given by state.""" 708 assert state.path is not None 709 self.fgmanager.flush_cache()
715 self.reload(state) 716 assert state.tree is not None 717 return state.tree
851 mod_obj = split_target(self.graph, s) 852 assert mod_obj 853 mod, obj = mod_obj
96 # Record an extra file needed for the test case.
97 assert item.arg is not None
98 contents = expand_variables("\n".join(item.data))
109 # Use an alternative stub file for the builtins module. 110 assert item.arg is not None 111 mpath = join(os.path.dirname(case.file), item.arg)
115 # Use an alternative stub file for the typing module. 116 assert item.arg is not None 117 src_path = join(os.path.dirname(case.file), item.arg)
121 # Use an alternative stub file for the _typeshed module. 122 assert item.arg is not None 123 src_path = join(os.path.dirname(case.file), item.arg)
127 passnum = 1 if item.id == "stale" else int(item.id[len("stale") :])
128 assert passnum > 0
129 modules = set() if item.arg is None else {t.strip() for t in item.arg.split(",")}
132 passnum = 1 if item.id == "rechecked" else int(item.id[len("rechecked") :])
133 assert passnum > 0
134 modules = set() if item.arg is None else {t.strip() for t in item.arg.split(",")}
137 passnum = 1 if item.id == "targets" else int(item.id[len("targets") :])
138 assert passnum > 0
139 reprocessed = [] if item.arg is None else [t.strip() for t in item.arg.split(",")]
142 # File/directory to delete during a multi-step test case 143 assert item.arg is not None 144 m = re.match(r"(.*)\.([0-9]+)$", item.arg)
192 passnum = int(item.id[len("out") :])
193 assert passnum > 1
194 output2[passnum] = tmp_output
293 ) -> None: 294 assert isinstance(parent, DataFileCollector) 295 super().__init__(name, parent)
318 parent = self.getparent(DataSuiteCollector) 319 assert parent is not None, "Should not happen" 320 suite = parent.obj()
327 if save_dir: 328 assert self.tmpdir is not None 329 target_dir = os.path.join(save_dir, os.path.basename(self.tmpdir))
331 if not os.path.isabs(target_dir): 332 assert self.old_cwd 333 target_dir = os.path.join(self.old_cwd, target_dir)
352 num = int(m.group(1)) 353 assert num >= 2 354 target_path = re.sub(r"\.[0-9]+$", "", path)
365 for num, paths in self.deleted_paths.items(): 366 assert num >= 2 367 for path in paths:
720
721 assert os.path.isdir(
722 suite.data_prefix
723 ), f"Test data prefix ({suite.data_prefix}) not set correctly"
724
749 collector = super().from_parent(parent, name=name) 750 assert isinstance(collector, DataFileCollector) 751 return collector
403 # Sanity check to avoid unexpected deletions
404 assert op.path.startswith("tmp")
405 shutil.rmtree(op.path)
1 import shlex 2 import subprocess 3 import sys
41 p = p_test_data / f"{data_file_prefix}-meta-{uuid.uuid4()}.test"
42 assert not p.exists()
43 data_suite = dedent_docstring(data_suite)
51 print(f">> {cmd}")
52 proc = subprocess.run(extra_args, capture_output=True, check=False, cwd=p_root)
53 if proc.returncode == 0:
14 render_diff_range(expected_ranges, expected, output=output) 15 assert output.getvalue() == " hello (diff)\n world\n" 16 output = io.StringIO()
17 render_diff_range(actual_ranges, actual, output=output) 18 assert output.getvalue() == " goodbye (diff)\n world\n" 19
26 render_diff_range(expected_ranges, expected, output=output, indent=0) 27 assert output.getvalue() == "a\nb\nc\n...\nf\ng\nh\ncircle (diff)\ni\nj\n" 28 output = io.StringIO()
29 render_diff_range(actual_ranges, actual, output=output, indent=0) 30 assert output.getvalue() == "a\nb\nc\n...\nf\ng\nh\nsquare (diff)\ni\nj\n" 31
35 36 assert diff_ranges(a, b) == ( 37 [(0, 0), (0, 2), (2, 2), (2, 2)], 38 [(0, 0), (0, 2), (2, 2), (2, 2)], 39 ) 40
43 44 assert diff_ranges(a, b) == ( 45 [(0, 1), (1, 2), (2, 2), (2, 2)], 46 [(0, 1), (1, 2), (2, 2), (2, 2)], 47 )
26 # Assert 27 assert "Invalid testcase id 'foo-XFAIL'" in result.stdout 28
44 ) 45 assert expected in result.stdout 46
58 # Assert 59 assert "version>=3.9 always true since minimum runtime version is (3, 9)" in actual.stdout 60
72 # Assert 73 assert "version==3.7 always false since minimum runtime version is (3, 9)" in actual.stdout
134 ) 135 assert result.input_updated == expected
43 result = _find_config_file() 44 assert result is None 45
58 result = _find_config_file() 59 assert result is not None 60 assert Path(result[2]).resolve() == config.resolve()
59 assert result is not None 60 assert Path(result[2]).resolve() == config.resolve() 61
65 result = _find_config_file() 66 assert result is None 67
69 result = _find_config_file() 70 assert result is not None 71 hg = child / ".hg"
74 result = _find_config_file() 75 assert result is None 76
96 result = _find_config_file() 97 assert result is not None 98 assert os.path.basename(result[2]) == "mypy.ini"
97 assert result is not None 98 assert os.path.basename(result[2]) == "mypy.ini" 99
101 result = _find_config_file() 102 assert result is not None 103 assert os.path.basename(result[2]) == ".mypy.ini"
102 assert result is not None 103 assert os.path.basename(result[2]) == ".mypy.ini" 104
106 result = _find_config_file() 107 assert result is not None 108 assert os.path.basename(result[2]) == "pyproject.toml"
107 assert result is not None 108 assert os.path.basename(result[2]) == "pyproject.toml" 109
111 result = _find_config_file() 112 assert result is not None 113 assert os.path.basename(result[2]) == "setup.cfg"
112 assert result is not None 113 assert os.path.basename(result[2]) == "setup.cfg" 114
128 result = _find_config_file() 129 assert result is not None 130 assert Path(result[2]).resolve() == parent_mypy.resolve()
129 assert result is not None 130 assert Path(result[2]).resolve() == parent_mypy.resolve()
80 finder = SourceFinder(FakeFSCache({"/setup.py"}), options)
81 assert crawl(finder, "/setup.py") == ("setup", "/")
82
83 finder = SourceFinder(FakeFSCache({"/a/setup.py"}), options)
84 assert crawl(finder, "/a/setup.py") == ("setup", "/a")
85
86 finder = SourceFinder(FakeFSCache({"/a/b/setup.py"}), options)
87 assert crawl(finder, "/a/b/setup.py") == ("setup", "/a/b")
88
89 finder = SourceFinder(FakeFSCache({"/a/setup.py", "/a/__init__.py"}), options)
90 assert crawl(finder, "/a/setup.py") == ("a.setup", "/")
91
92 finder = SourceFinder(FakeFSCache({"/a/invalid-name/setup.py", "/a/__init__.py"}), options)
93 assert crawl(finder, "/a/invalid-name/setup.py") == ("setup", "/a/invalid-name")
94
95 finder = SourceFinder(FakeFSCache({"/a/b/setup.py", "/a/__init__.py"}), options)
96 assert crawl(finder, "/a/b/setup.py") == ("setup", "/a/b")
97
100 )
101 assert crawl(finder, "/a/b/c/setup.py") == ("c.setup", "/a/b")
102
107 finder = SourceFinder(FakeFSCache({"/setup.py"}), options)
108 assert crawl(finder, "/setup.py") == ("setup", "/")
109
110 finder = SourceFinder(FakeFSCache({"/a/setup.py"}), options)
111 assert crawl(finder, "/a/setup.py") == ("setup", "/a")
112
113 finder = SourceFinder(FakeFSCache({"/a/b/setup.py"}), options)
114 assert crawl(finder, "/a/b/setup.py") == ("setup", "/a/b")
115
116 finder = SourceFinder(FakeFSCache({"/a/setup.py", "/a/__init__.py"}), options)
117 assert crawl(finder, "/a/setup.py") == ("a.setup", "/")
118
119 finder = SourceFinder(FakeFSCache({"/a/invalid-name/setup.py", "/a/__init__.py"}), options)
120 assert crawl(finder, "/a/invalid-name/setup.py") == ("setup", "/a/invalid-name")
121
122 finder = SourceFinder(FakeFSCache({"/a/b/setup.py", "/a/__init__.py"}), options)
123 assert crawl(finder, "/a/b/setup.py") == ("a.b.setup", "/")
124
127 )
128 assert crawl(finder, "/a/b/c/setup.py") == ("a.b.c.setup", "/")
129
135 finder = SourceFinder(FakeFSCache({"/setup.py"}), options)
136 assert crawl(finder, "/setup.py") == ("setup", "/")
137
138 finder = SourceFinder(FakeFSCache({"/a/setup.py"}), options)
139 assert crawl(finder, "/a/setup.py") == ("setup", "/a")
140
141 finder = SourceFinder(FakeFSCache({"/a/b/setup.py"}), options)
142 assert crawl(finder, "/a/b/setup.py") == ("setup", "/a/b")
143
144 finder = SourceFinder(FakeFSCache({"/a/setup.py", "/a/__init__.py"}), options)
145 assert crawl(finder, "/a/setup.py") == ("a.setup", "/")
146
147 finder = SourceFinder(FakeFSCache({"/a/invalid-name/setup.py", "/a/__init__.py"}), options)
148 assert crawl(finder, "/a/invalid-name/setup.py") == ("setup", "/a/invalid-name")
149
150 finder = SourceFinder(FakeFSCache({"/a/b/setup.py", "/a/__init__.py"}), options)
151 assert crawl(finder, "/a/b/setup.py") == ("a.b.setup", "/")
152
155 )
156 assert crawl(finder, "/a/b/c/setup.py") == ("a.b.c.setup", "/")
157
161 finder = SourceFinder(FakeFSCache({"/a/b/c/setup.py"}), options)
162 assert crawl(finder, "/a/b/c/setup.py") == ("c.setup", "/a/b")
163
166 )
167 assert crawl(finder, "/a/b/c/setup.py") == ("c.setup", "/a/b")
168
170 finder = SourceFinder(FakeFSCache({"/a/b/c/setup.py"}), options)
171 assert crawl(finder, "/a/b/c/setup.py") == ("setup", "/a/b/c")
172
179 finder = SourceFinder(FakeFSCache({"/a/pkg/a.py", "/b/pkg/b.py"}), options)
180 assert crawl(finder, "/a/pkg/a.py") == ("pkg.a", "/a")
181 assert crawl(finder, "/b/pkg/b.py") == ("pkg.b", "/b")
180 assert crawl(finder, "/a/pkg/a.py") == ("pkg.a", "/a")
181 assert crawl(finder, "/b/pkg/b.py") == ("pkg.b", "/b")
182
194 finder = SourceFinder(FakeFSCache(files), options)
195 assert find_sources_in_dir(finder, "/") == [
196 ("a2", "/pkg"),
197 ("e", "/pkg/a1/b/c/d"),
198 ("e", "/pkg/a2/b/c/d"),
199 ("f", "/pkg/a1/b"),
200 ("f", "/pkg/a2/b"),
201 ]
202
214 finder = SourceFinder(FakeFSCache(files), options)
215 assert find_sources_in_dir(finder, "/") == [
216 ("a2", "/pkg"),
217 ("a2.b.c.d.e", "/pkg"),
218 ("a2.b.f", "/pkg"),
219 ("e", "/pkg/a1/b/c/d"),
220 ("f", "/pkg/a1/b"),
221 ]
222
236 finder = SourceFinder(FakeFSCache(files), options)
237 assert find_sources_in_dir(finder, "/") == [
238 ("pkg.a1.b.c.d.e", "/"),
239 ("pkg.a1.b.f", "/"),
240 ("pkg.a2", "/"),
241 ("pkg.a2.b.c.d.e", "/"),
242 ("pkg.a2.b.f", "/"),
243 ]
244
246 finder = SourceFinder(FakeFSCache(files), options)
247 assert find_sources_in_dir(finder, "/") == [
248 ("a1.b.c.d.e", "/pkg"),
249 ("a1.b.f", "/pkg"),
250 ("a2", "/pkg"),
251 ("a2.b.c.d.e", "/pkg"),
252 ("a2.b.f", "/pkg"),
253 ]
254
261 finder = SourceFinder(FakeFSCache({"/a/pkg/a.py", "/b/pkg/b.py"}), options)
262 assert find_sources_in_dir(finder, "/") == [("pkg.a", "/a"), ("pkg.b", "/b")]
263
270 fscache = FakeFSCache({"/dir/a.py", f"/dir/venv/{excluded_dir}/b.py"})
271 assert find_sources(["/"], options, fscache) == [("a", "/dir")]
272 with pytest.raises(InvalidSourceList):
273 find_sources(["/dir/venv/"], options, fscache)
274 assert find_sources([f"/dir/venv/{excluded_dir}"], options, fscache) == [
275 ("b", f"/dir/venv/{excluded_dir}")
276 ]
277 assert find_sources([f"/dir/venv/{excluded_dir}/b.py"], options, fscache) == [
276 ]
277 assert find_sources([f"/dir/venv/{excluded_dir}/b.py"], options, fscache) == [
278 ("b", f"/dir/venv/{excluded_dir}")
279 ]
280
291 fscache = FakeFSCache(files)
292 assert find_sources(["/"], options, fscache) == [
293 ("a2", "/pkg"),
294 ("a2.b.c.d.e", "/pkg"),
295 ("e", "/pkg/a1/b/c/d"),
296 ]
297 assert find_sources(["/pkg/a1/b/f.py"], options, fscache) == [("f", "/pkg/a1/b")]
296 ]
297 assert find_sources(["/pkg/a1/b/f.py"], options, fscache) == [("f", "/pkg/a1/b")]
298 assert find_sources(["/pkg/a2/b/f.py"], options, fscache) == [("a2.b.f", "/pkg")]
297 assert find_sources(["/pkg/a1/b/f.py"], options, fscache) == [("f", "/pkg/a1/b")]
298 assert find_sources(["/pkg/a2/b/f.py"], options, fscache) == [("a2.b.f", "/pkg")]
299
302 fscache = FakeFSCache(files)
303 assert find_sources(["/"], options, fscache) == [
304 ("a2", "/pkg"),
305 ("a2.b.c.d.e", "/pkg"),
306 ("a2.b.f", "/pkg"),
307 ]
308 with pytest.raises(InvalidSourceList):
315 options.exclude = ["/a1/$"]
316 assert find_sources(["/pkg/a1"], options, fscache) == [
317 ("e", "/pkg/a1/b/c/d"),
318 ("f", "/pkg/a1/b"),
319 ]
320
323 fscache = FakeFSCache(files)
324 assert find_sources(["/"], options, fscache) == [
325 ("a2", "/pkg"),
326 ("a2.b.c.d.e", "/pkg"),
327 ("a2.b.f", "/pkg"),
328 ]
329 with pytest.raises(InvalidSourceList):
335 fscache = FakeFSCache(files)
336 assert find_sources(["/"], options, fscache) == [
337 ("a2", "/pkg"),
338 ("a2.b.c.d.e", "/pkg"),
339 ("a2.b.f", "/pkg"),
340 ]
341
343 fscache = FakeFSCache(files)
344 assert find_sources(["/"], options, fscache) == [
345 ("a2", "/pkg"),
346 ("a2.b.f", "/pkg"),
347 ("f", "/pkg/a1/b"),
348 ]
349
365 fscache = FakeFSCache(files) 366 assert len(find_sources(["/"], options, fscache)) == len(files) 367
375 fscache = FakeFSCache(files) 376 assert len(find_sources(["."], options, fscache)) == len(files)
30 ) 31 assert not result.errors 32
19 sys.stderr = self.sys_stderr 20 assert self.stdout.getvalue() == "" 21 assert self.stderr.getvalue() == ""
20 assert self.stdout.getvalue() == "" 21 assert self.stderr.getvalue() == "" 22
25 _, stderr, _ = mypy.api.run(["--some-bad-option"]) 26 assert isinstance(stderr, str) 27 assert stderr != ""
26 assert isinstance(stderr, str) 27 assert stderr != "" 28
31 _, stderr, _ = mypy.api.run([]) 32 assert isinstance(stderr, str) 33 assert stderr != ""
32 assert isinstance(stderr, str) 33 assert stderr != "" 34
37 stdout, _, _ = mypy.api.run(["--help"]) 38 assert isinstance(stdout, str) 39 assert stdout != ""
38 assert isinstance(stdout, str) 39 assert stdout != "" 40
43 stdout, _, _ = mypy.api.run(["--version"]) 44 assert isinstance(stdout, str) 45 assert stdout != ""
44 assert isinstance(stdout, str) 45 assert stdout != ""
34 _, options = process_options(matching_version) 35 assert options.python_version == sys.version_info[:2] 36 assert options.python_executable == sys.executable
35 assert options.python_version == sys.version_info[:2] 36 assert options.python_executable == sys.executable 37
39 _, options = process_options(matching_version) 40 assert options.python_version == sys.version_info[:2] 41 assert options.python_executable == sys.executable
40 assert options.python_version == sys.version_info[:2] 41 assert options.python_executable == sys.executable 42
48 _, options = process_options(matching_version) 49 assert options.python_version == sys.version_info[:2] 50 assert options.python_executable == sys.executable
49 assert options.python_version == sys.version_info[:2] 50 assert options.python_executable == sys.executable 51
54 _, options = process_options(matching_version) 55 assert options.python_version == sys.version_info[:2] 56 assert options.python_executable is None
55 assert options.python_version == sys.version_info[:2] 56 assert options.python_executable is None 57
68 infer_python_executable(options, special_opts) 69 assert options.python_version == sys.version_info[:2] 70 assert options.python_executable == sys.executable
69 assert options.python_version == sys.version_info[:2] 70 assert options.python_executable == sys.executable 71
75 infer_python_executable(options, special_opts) 76 assert options.python_version == sys.version_info[:2] 77 assert options.python_executable == sys.executable
76 assert options.python_version == sys.version_info[:2] 77 assert options.python_executable == sys.executable
29 try: 30 import lxml # type: ignore[import-untyped] 31 except ImportError:
172 finally: 173 assert sys.path[0] == plugin_dir 174 del sys.path[0]
183 else:
184 assert incremental_step == 0
185 msg = "Unexpected type checker output ({}, line {})"
249 # just notes attached to other errors. 250 assert error_paths or not busted_paths, "Some modules reported error despite no errors" 251 if not missing_paths == busted_paths:
252 raise AssertionError(f"cache data discrepancy {missing_paths} != {busted_paths}")
253 assert os.path.isfile(os.path.join(manager.options.cache_dir, ".gitignore"))
254 cachedir_tag = os.path.join(manager.options.cache_dir, "CACHEDIR.TAG")
254 cachedir_tag = os.path.join(manager.options.cache_dir, "CACHEDIR.TAG") 255 assert os.path.isfile(cachedir_tag) 256 with open(cachedir_tag) as f:
256 with open(cachedir_tag) as f:
257 assert f.read().startswith("Signature: 8a477f597d28d172789f06886806bc55")
258
318 path = cache.find_module(module_name)
319 assert isinstance(path, str), f"Can't find ad hoc case file: {module_name}"
320 with open(path, encoding="utf8") as f:
10 import re 11 import subprocess 12 import sys
22 try: 23 import lxml # type: ignore[import-untyped] 24 except ImportError:
47 def test_python_cmdline(testcase: DataDrivenTestCase, step: int) -> None: 48 assert testcase.old_cwd is not None, "test was not properly set up" 49 # Write the program to a file.
75 args = [arg.replace("$CWD", os.path.abspath(cwd)) for arg in args]
76 process = subprocess.Popen(
77 fixed + args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd, env=env
78 )
79 outb, errb = process.communicate()
13 def test_no_type_variables(self) -> None: 14 assert not infer_constraints(self.fx.o, self.fx.o, SUBTYPE_OF) 15
18 for direction in [SUBTYPE_OF, SUPERTYPE_OF]: 19 assert infer_constraints(fx.gt, fx.ga, direction) == [ 20 Constraint(type_var=fx.t, op=direction, target=fx.a) 21 ] 22
24 fx = self.fx 25 assert infer_constraints( 26 Instance(fx.gvi, [UnpackType(fx.ts)]), Instance(fx.gvi, [fx.a, fx.b]), SUBTYPE_OF 27 ) == [ 28 Constraint(type_var=fx.ts, op=SUBTYPE_OF, target=TupleType([fx.a, fx.b], fx.std_tuple)) 29 ] 30
32 fx = self.fx
33 assert set(
34 infer_constraints(
35 Instance(fx.gvi, [UnpackType(fx.ts)]), Instance(fx.gvi, [fx.a, fx.b]), SUPERTYPE_OF
36 )
37 ) == {
38 Constraint(
39 type_var=fx.ts, op=SUPERTYPE_OF, target=TupleType([fx.a, fx.b], fx.std_tuple)
40 ),
41 Constraint(
42 type_var=fx.ts, op=SUBTYPE_OF, target=TupleType([fx.a, fx.b], fx.std_tuple)
43 ),
44 }
45
47 fx = self.fx
48 assert set(
49 infer_constraints(
50 Instance(fx.gv2i, [fx.t, UnpackType(fx.ts), fx.s]),
51 Instance(fx.gv2i, [fx.a, fx.b, fx.c, fx.d]),
52 SUPERTYPE_OF,
53 )
54 ) == {
55 Constraint(type_var=fx.t, op=SUPERTYPE_OF, target=fx.a),
56 Constraint(
57 type_var=fx.ts, op=SUPERTYPE_OF, target=TupleType([fx.b, fx.c], fx.std_tuple)
58 ),
59 Constraint(
60 type_var=fx.ts, op=SUBTYPE_OF, target=TupleType([fx.b, fx.c], fx.std_tuple)
61 ),
62 Constraint(type_var=fx.s, op=SUPERTYPE_OF, target=fx.d),
63 }
64
66 fx = self.fx
67 assert set(
68 infer_constraints(
69 Instance(fx.gvi, [UnpackType(Instance(fx.std_tuplei, [fx.t]))]),
70 Instance(fx.gvi, [fx.a, fx.b]),
71 SUPERTYPE_OF,
72 )
73 ) == {
74 Constraint(type_var=fx.t, op=SUPERTYPE_OF, target=fx.a),
75 Constraint(type_var=fx.t, op=SUBTYPE_OF, target=fx.a),
76 Constraint(type_var=fx.t, op=SUPERTYPE_OF, target=fx.b),
77 Constraint(type_var=fx.t, op=SUBTYPE_OF, target=fx.b),
78 }
79
81 fx = self.fx
82 assert set(
83 infer_constraints(
84 Instance(fx.gv2i, [fx.t, UnpackType(Instance(fx.std_tuplei, [fx.s])), fx.u]),
85 Instance(fx.gv2i, [fx.a, fx.b, fx.c, fx.d]),
86 SUPERTYPE_OF,
87 )
88 ) == {
89 Constraint(type_var=fx.t, op=SUPERTYPE_OF, target=fx.a),
90 Constraint(type_var=fx.s, op=SUPERTYPE_OF, target=fx.b),
91 Constraint(type_var=fx.s, op=SUBTYPE_OF, target=fx.b),
92 Constraint(type_var=fx.s, op=SUPERTYPE_OF, target=fx.c),
93 Constraint(type_var=fx.s, op=SUBTYPE_OF, target=fx.c),
94 Constraint(type_var=fx.u, op=SUPERTYPE_OF, target=fx.d),
95 }
96
98 fx = self.fx
99 assert set(
100 infer_constraints(
101 Instance(fx.gv2i, [fx.u, fx.t, fx.s, fx.u]),
102 Instance(fx.gv2i, [fx.a, fx.b, fx.c, fx.d]),
103 SUPERTYPE_OF,
104 )
105 ) == {
106 Constraint(type_var=fx.u, op=SUPERTYPE_OF, target=fx.a),
107 Constraint(type_var=fx.t, op=SUPERTYPE_OF, target=fx.b),
108 Constraint(type_var=fx.t, op=SUBTYPE_OF, target=fx.b),
109 Constraint(type_var=fx.s, op=SUPERTYPE_OF, target=fx.c),
110 Constraint(type_var=fx.s, op=SUBTYPE_OF, target=fx.c),
111 Constraint(type_var=fx.u, op=SUPERTYPE_OF, target=fx.d),
112 }
113
115 fx = self.fx
116 assert set(
117 infer_constraints(
118 Instance(fx.gv2i, [fx.u, fx.t, fx.s, fx.u]),
119 Instance(fx.gv2i, [fx.a, fx.b, fx.d]),
120 SUPERTYPE_OF,
121 )
122 # We still get constraints on the prefix/suffix in this case.
123 ) == {
124 Constraint(type_var=fx.u, op=SUPERTYPE_OF, target=fx.a),
125 Constraint(type_var=fx.u, op=SUPERTYPE_OF, target=fx.d),
126 }
127
129 fx = self.fx 130 assert not infer_constraints( 131 TupleType([fx.t, fx.s], fallback=Instance(fx.std_tuplei, [fx.o])), 132 Instance(fx.std_tuplei, [fx.a]), 133 SUPERTYPE_OF, 134 )
10 import os 11 import subprocess 12 import sys
38 def test_daemon(testcase: DataDrivenTestCase) -> None: 39 assert testcase.old_cwd is not None, "test was not properly set up" 40 for i, step in enumerate(parse_script(testcase.input)):
42 expected_lines = step[1:]
43 assert cmd.startswith("$")
44 cmd = cmd[1:].strip()
69 if step:
70 assert step[0].startswith("$")
71 steps.append(step)
89 output = subprocess.check_output( 90 input, shell=True, stderr=subprocess.STDOUT, text=True, cwd=test_temp_dir, env=env 91 ) 92 return 0, output 93 except subprocess.CalledProcessError as err:
124 )
125 assert res == {"a", "b", "c", "d", "f", "long_name"}
126
41 42 assert ( 43 files1 is not None and files2 is not None 44 ), "cases where CompileError occurred should not be run" 45 prefix = "__main__"
41 except CompileError as e: 42 assert e.messages == [] 43
102 103 assert testcase.tmpdir is not None 104 a.extend(self.maybe_suggest(step, server, main_src, testcase.tmpdir))
171 out = response["out"] or response["err"] 172 assert isinstance(out, str) 173 return out.splitlines()
249 a = new_messages 250 assert testcase.tmpdir is not None 251 a.extend(self.maybe_suggest(step, server, main_src, testcase.tmpdir))
380 old_msgs = ['foo/y.py:12: note: "Y" not defined', 'x.py:8: error: "str" not callable'] 381 assert sort_messages_preserving_file_order(msgs, old_msgs) == list(reversed(msgs)) 382 assert sort_messages_preserving_file_order(list(reversed(msgs)), old_msgs) == list(
381 assert sort_messages_preserving_file_order(msgs, old_msgs) == list(reversed(msgs)) 382 assert sort_messages_preserving_file_order(list(reversed(msgs)), old_msgs) == list( 383 reversed(msgs) 384 ) 385
399 old_msgs = ['foo/y.py:12: note: "Y" not defined', 'x.py:8: error: "str" not callable'] 400 assert sort_messages_preserving_file_order(msg1 + msg2, old_msgs) == msg2 + msg1 401 assert sort_messages_preserving_file_order(msg2 + msg1, old_msgs) == msg2 + msg1
400 assert sort_messages_preserving_file_order(msg1 + msg2, old_msgs) == msg2 + msg1 401 assert sort_messages_preserving_file_order(msg2 + msg1, old_msgs) == msg2 + msg1 402
413 ] 414 assert sort_messages_preserving_file_order([msg1, msg2, msg3], old_msgs) == [ 415 msg2, 416 msg1, 417 msg3, 418 ] 419 assert sort_messages_preserving_file_order([msg3, msg2, msg1], old_msgs) == [
418 ] 419 assert sort_messages_preserving_file_order([msg3, msg2, msg1], old_msgs) == [ 420 msg2, 421 msg1, 422 msg3, 423 ] 424
430 old_msgs = ['foo/y.py:12: note: "Y" not defined', 'x.py:8: error: "str" not callable'] 431 assert sort_messages_preserving_file_order([msg1, msg2, new1], old_msgs) == [ 432 msg2, 433 msg1, 434 new1, 435 ] 436 assert sort_messages_preserving_file_order([new1, msg1, msg2, new2], old_msgs) == [
435 ] 436 assert sort_messages_preserving_file_order([new1, msg1, msg2, new2], old_msgs) == [ 437 msg2, 438 msg1, 439 new1, 440 new2, 441 ]
9 def test_trim_source(self) -> None:
10 assert trim_source_line("0123456789abcdef", max_len=16, col=5, min_width=2) == (
11 "0123456789abcdef",
12 0,
13 )
14
15 # Locations near start.
16 assert trim_source_line("0123456789abcdef", max_len=7, col=0, min_width=2) == (
17 "0123456...",
18 0,
19 )
20 assert trim_source_line("0123456789abcdef", max_len=7, col=4, min_width=2) == (
19 )
20 assert trim_source_line("0123456789abcdef", max_len=7, col=4, min_width=2) == (
21 "0123456...",
22 0,
23 )
24
25 # Middle locations.
26 assert trim_source_line("0123456789abcdef", max_len=7, col=5, min_width=2) == (
27 "...1234567...",
28 -2,
29 )
30 assert trim_source_line("0123456789abcdef", max_len=7, col=6, min_width=2) == (
29 )
30 assert trim_source_line("0123456789abcdef", max_len=7, col=6, min_width=2) == (
31 "...2345678...",
32 -1,
33 )
34 assert trim_source_line("0123456789abcdef", max_len=7, col=8, min_width=2) == (
33 )
34 assert trim_source_line("0123456789abcdef", max_len=7, col=8, min_width=2) == (
35 "...456789a...",
36 1,
37 )
38
39 # Locations near the end.
40 assert trim_source_line("0123456789abcdef", max_len=7, col=11, min_width=2) == (
41 "...789abcd...",
42 4,
43 )
44 assert trim_source_line("0123456789abcdef", max_len=7, col=13, min_width=2) == (
43 )
44 assert trim_source_line("0123456789abcdef", max_len=7, col=13, min_width=2) == (
45 "...9abcdef",
46 6,
47 )
48 assert trim_source_line("0123456789abcdef", max_len=7, col=15, min_width=2) == (
47 )
48 assert trim_source_line("0123456789abcdef", max_len=7, col=15, min_width=2) == (
49 "...9abcdef",
50 6,
51 )
52
53 def test_split_words(self) -> None:
54 assert split_words("Simple message") == ["Simple", "message"]
55 assert split_words('Message with "Some[Long, Types]" in it') == [
54 assert split_words("Simple message") == ["Simple", "message"]
55 assert split_words('Message with "Some[Long, Types]" in it') == [
56 "Message",
57 "with",
58 '"Some[Long, Types]"',
59 "in",
60 "it",
61 ]
62 assert split_words('Message with "Some[Long, Types]" and [error-code]') == [
61 ]
62 assert split_words('Message with "Some[Long, Types]" and [error-code]') == [
63 "Message",
64 "with",
65 '"Some[Long, Types]"',
66 "and",
67 "[error-code]",
68 ]
69 assert split_words('"Type[Stands, First]" then words') == [
68 ]
69 assert split_words('"Type[Stands, First]" then words') == [
70 '"Type[Stands, First]"',
71 "then",
72 "words",
73 ]
74 assert split_words('First words "Then[Stands, Type]"') == [
73 ]
74 assert split_words('First words "Then[Stands, Type]"') == [
75 "First",
76 "words",
77 '"Then[Stands, Type]"',
78 ]
79 assert split_words('"Type[Only, Here]"') == ['"Type[Only, Here]"']
78 ]
79 assert split_words('"Type[Only, Here]"') == ['"Type[Only, Here]"']
80 assert split_words("OneWord") == ["OneWord"]
79 assert split_words('"Type[Only, Here]"') == ['"Type[Only, Here]"']
80 assert split_words("OneWord") == ["OneWord"]
81 assert split_words(" ") == ["", ""]
80 assert split_words("OneWord") == ["OneWord"]
81 assert split_words(" ") == ["", ""]
82
29 for i in range(2):
30 assert self.isfile_case("bar.py")
31 assert self.isfile_case("pkg/sub_package/__init__.py")
30 assert self.isfile_case("bar.py")
31 assert self.isfile_case("pkg/sub_package/__init__.py")
32 assert self.isfile_case("pkg/sub_package/foo.py")
31 assert self.isfile_case("pkg/sub_package/__init__.py")
32 assert self.isfile_case("pkg/sub_package/foo.py")
33 assert not self.isfile_case("non_existent.py")
32 assert self.isfile_case("pkg/sub_package/foo.py")
33 assert not self.isfile_case("non_existent.py")
34 assert not self.isfile_case("pkg/non_existent.py")
33 assert not self.isfile_case("non_existent.py")
34 assert not self.isfile_case("pkg/non_existent.py")
35 assert not self.isfile_case("pkg/")
34 assert not self.isfile_case("pkg/non_existent.py")
35 assert not self.isfile_case("pkg/")
36 assert not self.isfile_case("bar.py/")
35 assert not self.isfile_case("pkg/")
36 assert not self.isfile_case("bar.py/")
37 for i in range(2):
37 for i in range(2):
38 assert not self.isfile_case("Bar.py")
39 assert not self.isfile_case("pkg/sub_package/__init__.PY")
38 assert not self.isfile_case("Bar.py")
39 assert not self.isfile_case("pkg/sub_package/__init__.PY")
40 assert not self.isfile_case("pkg/Sub_Package/foo.py")
39 assert not self.isfile_case("pkg/sub_package/__init__.PY")
40 assert not self.isfile_case("pkg/Sub_Package/foo.py")
41 assert not self.isfile_case("Pkg/sub_package/foo.py")
40 assert not self.isfile_case("pkg/Sub_Package/foo.py")
41 assert not self.isfile_case("Pkg/sub_package/foo.py")
42
49 for i in range(2):
50 assert not self.isfile_case("Bar.py")
51 assert not self.isfile_case("pkg/sub_package/__init__.PY")
50 assert not self.isfile_case("Bar.py")
51 assert not self.isfile_case("pkg/sub_package/__init__.PY")
52 assert not self.isfile_case("pkg/Sub_Package/foo.py")
51 assert not self.isfile_case("pkg/sub_package/__init__.PY")
52 assert not self.isfile_case("pkg/Sub_Package/foo.py")
53 assert not self.isfile_case("Pkg/sub_package/foo.py")
52 assert not self.isfile_case("pkg/Sub_Package/foo.py")
53 assert not self.isfile_case("Pkg/sub_package/foo.py")
54 for i in range(2):
54 for i in range(2):
55 assert self.isfile_case("bar.py")
56 assert self.isfile_case("pkg/sub_package/__init__.py")
55 assert self.isfile_case("bar.py")
56 assert self.isfile_case("pkg/sub_package/__init__.py")
57 assert self.isfile_case("pkg/sub_package/foo.py")
56 assert self.isfile_case("pkg/sub_package/__init__.py")
57 assert self.isfile_case("pkg/sub_package/foo.py")
58 assert not self.isfile_case("non_existent.py")
57 assert self.isfile_case("pkg/sub_package/foo.py")
58 assert not self.isfile_case("non_existent.py")
59 assert not self.isfile_case("pkg/non_existent.py")
58 assert not self.isfile_case("non_existent.py")
59 assert not self.isfile_case("pkg/non_existent.py")
60
66 for i in range(2):
67 assert self.isfile_case("bar.py")
68 assert not self.isfile_case("non_existent.py")
67 assert self.isfile_case("bar.py")
68 assert not self.isfile_case("non_existent.py")
69 assert not self.isfile_case("pkg/non_existent.py")
68 assert not self.isfile_case("non_existent.py")
69 assert not self.isfile_case("pkg/non_existent.py")
70 assert not self.isfile_case("Bar.py")
69 assert not self.isfile_case("pkg/non_existent.py")
70 assert not self.isfile_case("Bar.py")
71 assert not self.isfile_case("pkg/sub_package/__init__.PY")
70 assert not self.isfile_case("Bar.py")
71 assert not self.isfile_case("pkg/sub_package/__init__.PY")
72 assert not self.isfile_case("pkg/Sub_Package/foo.py")
71 assert not self.isfile_case("pkg/sub_package/__init__.PY")
72 assert not self.isfile_case("pkg/Sub_Package/foo.py")
73 assert not self.isfile_case("Pkg/sub_package/foo.py")
72 assert not self.isfile_case("pkg/Sub_Package/foo.py")
73 assert not self.isfile_case("Pkg/sub_package/foo.py")
74 assert self.isfile_case("pkg/sub_package/__init__.py")
73 assert not self.isfile_case("Pkg/sub_package/foo.py")
74 assert self.isfile_case("pkg/sub_package/__init__.py")
75 assert self.isfile_case("pkg/sub_package/foo.py")
74 assert self.isfile_case("pkg/sub_package/__init__.py")
75 assert self.isfile_case("pkg/sub_package/foo.py")
76
81 self.make_file("pkg/other_dir.py", base=other)
82 assert self.isfile_case(os.path.join(other, "other_dir.py"))
83 assert not self.isfile_case(os.path.join(other, "Other_Dir.py"))
82 assert self.isfile_case(os.path.join(other, "other_dir.py")) 83 assert not self.isfile_case(os.path.join(other, "Other_Dir.py")) 84 assert not self.isfile_case(os.path.join(other, "bar.py"))
83 assert not self.isfile_case(os.path.join(other, "Other_Dir.py")) 84 assert not self.isfile_case(os.path.join(other, "bar.py")) 85 if os.path.exists(os.path.join(other, "PKG/other_dir.py")):
87 # this path is not under the prefix, case difference is fine. 88 assert self.isfile_case(os.path.join(other, "PKG/other_dir.py")) 89
51 with IPCClient(connection_name, timeout=1) as client:
52 assert client.read() == msg
53 client.write("test")
64 with IPCClient(connection_name, timeout=1) as client:
65 assert client.read() == msg
66 client.write("") # don't let the server hang up yet, we want to connect again.
68 with IPCClient(connection_name, timeout=1) as client:
69 assert client.read() == msg
70 client.write("test")
73 p.join() 74 assert p.exitcode == 0 75
85 client.write(fancy_text.decode("utf-8"))
86 assert client.read() == fancy_text.decode("utf-8")
87
90 time.sleep(0) # yield to the server to force reading of all messages by server. 91 assert client.read() == "Test with spaces" 92 assert client.read() == "Test write before reading previous"
91 assert client.read() == "Test with spaces" 92 assert client.read() == "Test write before reading previous" 93
94 client.write("quit")
95 assert client.read() == "quit"
96 queue.close()
98 p.join() 99 assert p.exitcode == 0 100
44 self.str_conv = StrConv(show_ids=True, options=Options()) 45 assert self.str_conv.id_mapper is not None 46 self.id_mapper: IdMapper = self.str_conv.id_mapper
63 result = self.build(main_src, testcase) 64 assert result is not None, "cases where CompileError occurred should not be run" 65 result.manager.fscache.flush()
87 # Verify that old AST nodes are removed from the expression type map. 88 assert expr not in new_types 89
144 return self.dump_types(modules, manager)
145 assert False, f"Invalid kind {kind}"
146
213 tree = manager.graph[module_id].tree
214 assert tree is not None
215 type_map = {
13 if os.getenv("TEST_MYPYC", None) == "1":
14 assert not mypy.__file__.endswith(".py"), "Expected to find a mypyc-compiled version"
57 58 assert normalized_output == testcase.output
110 if e.module_with_blocker is not None: 111 assert e.module_with_blocker == "__main__" 112 # Verify that there was a compile error and that the error messages
4 import re 5 import subprocess 6 import sys
36 with tempfile.TemporaryDirectory() as venv_dir: 37 proc = subprocess.run( 38 [python_executable, "-m", "venv", venv_dir], cwd=os.getcwd(), capture_output=True 39 ) 40 if proc.returncode != 0:
62 with filelock.FileLock(pip_lock, timeout=pip_timeout): 63 proc = subprocess.run(install_cmd, capture_output=True, env=os.environ) 64 except filelock.Timeout as err:
87 with filelock.FileLock(pip_lock, timeout=pip_timeout): 88 proc = subprocess.run(install_cmd, cwd=working_dir, capture_output=True, env=env) 89 except filelock.Timeout as err:
96 """Test running mypy on files that depend on PEP 561 packages.""" 97 assert testcase.old_cwd is not None, "test was not properly set up" 98 python = sys.executable
99 100 assert python is not None, "Should be impossible" 101 pkgs, pip_args = parse_pkgs(testcase.input[0])
108 raise ValueError(f"Unknown pip argument: {arg}")
109 assert pkgs, "No packages to install for PEP 561 test?"
110 with virtualenv(python) as venv:
17 import re 18 import subprocess 19 import sys
46 """ 47 assert testcase.old_cwd is not None, "test was not properly set up" 48 # We must enable site packages to get access to installed stubs.
98 # Execute the program. 99 proc = subprocess.run( 100 [interpreter, "-Wignore", program], cwd=test_temp_dir, capture_output=True 101 ) 102 output.extend(split_lines(proc.stdout, proc.stderr))
10 try: 11 import lxml # type: ignore[import-untyped] 12 except ImportError:
25 def test_as_xml(self) -> None: 26 import lxml.etree as etree # type: ignore[import-untyped] 27
184 if isinstance(n.node, TypeInfo): 185 assert n.fullname 186 if any(n.fullname.startswith(m + ".") for m in testcase.test_modules):
117 collect_build_targets(opts, mypy_options(opts)) 118 assert captured_output.getvalue() == "" 119 finally:
584 def test_infer_sig_from_docstring_square_brackets(self) -> None:
585 assert (
586 infer_sig_from_docstring("fetch_row([maxrows, how]) -- Fetches stuff", "fetch_row")
587 == []
588 )
589
741 def test_common_dir_prefix_unix(self) -> None: 742 assert common_dir_prefix([]) == "." 743 assert common_dir_prefix(["x.pyi"]) == "."
742 assert common_dir_prefix([]) == "." 743 assert common_dir_prefix(["x.pyi"]) == "." 744 assert common_dir_prefix(["./x.pyi"]) == "."
743 assert common_dir_prefix(["x.pyi"]) == "." 744 assert common_dir_prefix(["./x.pyi"]) == "." 745 assert common_dir_prefix(["foo/bar/x.pyi"]) == "foo/bar"
744 assert common_dir_prefix(["./x.pyi"]) == "." 745 assert common_dir_prefix(["foo/bar/x.pyi"]) == "foo/bar" 746 assert common_dir_prefix(["foo/bar/x.pyi", "foo/bar/y.pyi"]) == "foo/bar"
745 assert common_dir_prefix(["foo/bar/x.pyi"]) == "foo/bar" 746 assert common_dir_prefix(["foo/bar/x.pyi", "foo/bar/y.pyi"]) == "foo/bar" 747 assert common_dir_prefix(["foo/bar/x.pyi", "foo/y.pyi"]) == "foo"
746 assert common_dir_prefix(["foo/bar/x.pyi", "foo/bar/y.pyi"]) == "foo/bar" 747 assert common_dir_prefix(["foo/bar/x.pyi", "foo/y.pyi"]) == "foo" 748 assert common_dir_prefix(["foo/x.pyi", "foo/bar/y.pyi"]) == "foo"
747 assert common_dir_prefix(["foo/bar/x.pyi", "foo/y.pyi"]) == "foo" 748 assert common_dir_prefix(["foo/x.pyi", "foo/bar/y.pyi"]) == "foo" 749 assert common_dir_prefix(["foo/bar/zar/x.pyi", "foo/y.pyi"]) == "foo"
748 assert common_dir_prefix(["foo/x.pyi", "foo/bar/y.pyi"]) == "foo" 749 assert common_dir_prefix(["foo/bar/zar/x.pyi", "foo/y.pyi"]) == "foo" 750 assert common_dir_prefix(["foo/x.pyi", "foo/bar/zar/y.pyi"]) == "foo"
749 assert common_dir_prefix(["foo/bar/zar/x.pyi", "foo/y.pyi"]) == "foo" 750 assert common_dir_prefix(["foo/x.pyi", "foo/bar/zar/y.pyi"]) == "foo" 751 assert common_dir_prefix(["foo/bar/zar/x.pyi", "foo/bar/y.pyi"]) == "foo/bar"
750 assert common_dir_prefix(["foo/x.pyi", "foo/bar/zar/y.pyi"]) == "foo" 751 assert common_dir_prefix(["foo/bar/zar/x.pyi", "foo/bar/y.pyi"]) == "foo/bar" 752 assert common_dir_prefix(["foo/bar/x.pyi", "foo/bar/zar/y.pyi"]) == "foo/bar"
751 assert common_dir_prefix(["foo/bar/zar/x.pyi", "foo/bar/y.pyi"]) == "foo/bar" 752 assert common_dir_prefix(["foo/bar/x.pyi", "foo/bar/zar/y.pyi"]) == "foo/bar" 753 assert common_dir_prefix([r"foo/bar\x.pyi"]) == "foo"
752 assert common_dir_prefix(["foo/bar/x.pyi", "foo/bar/zar/y.pyi"]) == "foo/bar" 753 assert common_dir_prefix([r"foo/bar\x.pyi"]) == "foo" 754 assert common_dir_prefix([r"foo\bar/x.pyi"]) == r"foo\bar"
753 assert common_dir_prefix([r"foo/bar\x.pyi"]) == "foo" 754 assert common_dir_prefix([r"foo\bar/x.pyi"]) == r"foo\bar" 755
759 def test_common_dir_prefix_win(self) -> None: 760 assert common_dir_prefix(["x.pyi"]) == "." 761 assert common_dir_prefix([r".\x.pyi"]) == "."
760 assert common_dir_prefix(["x.pyi"]) == "." 761 assert common_dir_prefix([r".\x.pyi"]) == "." 762 assert common_dir_prefix([r"foo\bar\x.pyi"]) == r"foo\bar"
761 assert common_dir_prefix([r".\x.pyi"]) == "." 762 assert common_dir_prefix([r"foo\bar\x.pyi"]) == r"foo\bar" 763 assert common_dir_prefix([r"foo\bar\x.pyi", r"foo\bar\y.pyi"]) == r"foo\bar"
762 assert common_dir_prefix([r"foo\bar\x.pyi"]) == r"foo\bar" 763 assert common_dir_prefix([r"foo\bar\x.pyi", r"foo\bar\y.pyi"]) == r"foo\bar" 764 assert common_dir_prefix([r"foo\bar\x.pyi", r"foo\y.pyi"]) == "foo"
763 assert common_dir_prefix([r"foo\bar\x.pyi", r"foo\bar\y.pyi"]) == r"foo\bar" 764 assert common_dir_prefix([r"foo\bar\x.pyi", r"foo\y.pyi"]) == "foo" 765 assert common_dir_prefix([r"foo\x.pyi", r"foo\bar\y.pyi"]) == "foo"
764 assert common_dir_prefix([r"foo\bar\x.pyi", r"foo\y.pyi"]) == "foo" 765 assert common_dir_prefix([r"foo\x.pyi", r"foo\bar\y.pyi"]) == "foo" 766 assert common_dir_prefix([r"foo\bar\zar\x.pyi", r"foo\y.pyi"]) == "foo"
765 assert common_dir_prefix([r"foo\x.pyi", r"foo\bar\y.pyi"]) == "foo" 766 assert common_dir_prefix([r"foo\bar\zar\x.pyi", r"foo\y.pyi"]) == "foo" 767 assert common_dir_prefix([r"foo\x.pyi", r"foo\bar\zar\y.pyi"]) == "foo"
766 assert common_dir_prefix([r"foo\bar\zar\x.pyi", r"foo\y.pyi"]) == "foo" 767 assert common_dir_prefix([r"foo\x.pyi", r"foo\bar\zar\y.pyi"]) == "foo" 768 assert common_dir_prefix([r"foo\bar\zar\x.pyi", r"foo\bar\y.pyi"]) == r"foo\bar"
767 assert common_dir_prefix([r"foo\x.pyi", r"foo\bar\zar\y.pyi"]) == "foo" 768 assert common_dir_prefix([r"foo\bar\zar\x.pyi", r"foo\bar\y.pyi"]) == r"foo\bar" 769 assert common_dir_prefix([r"foo\bar\x.pyi", r"foo\bar\zar\y.pyi"]) == r"foo\bar"
768 assert common_dir_prefix([r"foo\bar\zar\x.pyi", r"foo\bar\y.pyi"]) == r"foo\bar" 769 assert common_dir_prefix([r"foo\bar\x.pyi", r"foo\bar\zar\y.pyi"]) == r"foo\bar" 770 assert common_dir_prefix([r"foo/bar\x.pyi"]) == r"foo\bar"
769 assert common_dir_prefix([r"foo\bar\x.pyi", r"foo\bar\zar\y.pyi"]) == r"foo\bar" 770 assert common_dir_prefix([r"foo/bar\x.pyi"]) == r"foo\bar" 771 assert common_dir_prefix([r"foo\bar/x.pyi"]) == r"foo\bar"
770 assert common_dir_prefix([r"foo/bar\x.pyi"]) == r"foo\bar" 771 assert common_dir_prefix([r"foo\bar/x.pyi"]) == r"foo\bar" 772 assert common_dir_prefix([r"foo/bar/x.pyi"]) == r"foo\bar"
771 assert common_dir_prefix([r"foo\bar/x.pyi"]) == r"foo\bar" 772 assert common_dir_prefix([r"foo/bar/x.pyi"]) == r"foo\bar" 773
781 ) 782 assert ctx.fullname == "spangle.Parent.Nested.foo" 783
786 def test_is_blacklisted_path(self) -> None:
787 assert not is_blacklisted_path("foo/bar.py")
788 assert not is_blacklisted_path("foo.py")
787 assert not is_blacklisted_path("foo/bar.py")
788 assert not is_blacklisted_path("foo.py")
789 assert not is_blacklisted_path("foo/xvendor/bar.py")
788 assert not is_blacklisted_path("foo.py")
789 assert not is_blacklisted_path("foo/xvendor/bar.py")
790 assert not is_blacklisted_path("foo/vendorx/bar.py")
789 assert not is_blacklisted_path("foo/xvendor/bar.py")
790 assert not is_blacklisted_path("foo/vendorx/bar.py")
791 assert is_blacklisted_path("foo/vendor/bar.py")
790 assert not is_blacklisted_path("foo/vendorx/bar.py")
791 assert is_blacklisted_path("foo/vendor/bar.py")
792 assert is_blacklisted_path("foo/vendored/bar.py")
791 assert is_blacklisted_path("foo/vendor/bar.py")
792 assert is_blacklisted_path("foo/vendored/bar.py")
793 assert is_blacklisted_path("foo/vendored/bar/thing.py")
792 assert is_blacklisted_path("foo/vendored/bar.py")
793 assert is_blacklisted_path("foo/vendored/bar/thing.py")
794 assert is_blacklisted_path("foo/six.py")
793 assert is_blacklisted_path("foo/vendored/bar/thing.py")
794 assert is_blacklisted_path("foo/six.py")
795
796 def test_is_non_library_module(self) -> None:
797 assert not is_non_library_module("foo")
798 assert not is_non_library_module("foo.bar")
797 assert not is_non_library_module("foo")
798 assert not is_non_library_module("foo.bar")
799
801 # don't treat them as such since they could plausibly be real modules.
802 assert not is_non_library_module("foo.bartest")
803 assert not is_non_library_module("foo.bartests")
802 assert not is_non_library_module("foo.bartest")
803 assert not is_non_library_module("foo.bartests")
804 assert not is_non_library_module("foo.testbar")
803 assert not is_non_library_module("foo.bartests")
804 assert not is_non_library_module("foo.testbar")
805
805
806 assert is_non_library_module("foo.test")
807 assert is_non_library_module("foo.test.foo")
806 assert is_non_library_module("foo.test")
807 assert is_non_library_module("foo.test.foo")
808 assert is_non_library_module("foo.tests")
807 assert is_non_library_module("foo.test.foo")
808 assert is_non_library_module("foo.tests")
809 assert is_non_library_module("foo.tests.foo")
808 assert is_non_library_module("foo.tests")
809 assert is_non_library_module("foo.tests.foo")
810 assert is_non_library_module("foo.testing.foo")
809 assert is_non_library_module("foo.tests.foo")
810 assert is_non_library_module("foo.testing.foo")
811 assert is_non_library_module("foo.SelfTest.foo")
810 assert is_non_library_module("foo.testing.foo")
811 assert is_non_library_module("foo.SelfTest.foo")
812
812
813 assert is_non_library_module("foo.test_bar")
814 assert is_non_library_module("foo.bar_tests")
813 assert is_non_library_module("foo.test_bar")
814 assert is_non_library_module("foo.bar_tests")
815 assert is_non_library_module("foo.testing")
814 assert is_non_library_module("foo.bar_tests")
815 assert is_non_library_module("foo.testing")
816 assert is_non_library_module("foo.conftest")
815 assert is_non_library_module("foo.testing")
816 assert is_non_library_module("foo.conftest")
817 assert is_non_library_module("foo.bar_test_util")
816 assert is_non_library_module("foo.conftest")
817 assert is_non_library_module("foo.bar_test_util")
818 assert is_non_library_module("foo.bar_test_utils")
817 assert is_non_library_module("foo.bar_test_util")
818 assert is_non_library_module("foo.bar_test_utils")
819 assert is_non_library_module("foo.bar_test_base")
818 assert is_non_library_module("foo.bar_test_utils")
819 assert is_non_library_module("foo.bar_test_base")
820
820
821 assert is_non_library_module("foo.setup")
822
822
823 assert is_non_library_module("foo.__main__")
824
1574 def test_is_valid_type(self) -> None:
1575 assert is_valid_type("int")
1576 assert is_valid_type("str")
1575 assert is_valid_type("int")
1576 assert is_valid_type("str")
1577 assert is_valid_type("Foo_Bar234")
1576 assert is_valid_type("str")
1577 assert is_valid_type("Foo_Bar234")
1578 assert is_valid_type("foo.bar")
1577 assert is_valid_type("Foo_Bar234")
1578 assert is_valid_type("foo.bar")
1579 assert is_valid_type("List[int]")
1578 assert is_valid_type("foo.bar")
1579 assert is_valid_type("List[int]")
1580 assert is_valid_type("Dict[str, int]")
1579 assert is_valid_type("List[int]")
1580 assert is_valid_type("Dict[str, int]")
1581 assert is_valid_type("None")
1580 assert is_valid_type("Dict[str, int]")
1581 assert is_valid_type("None")
1582 assert is_valid_type("Literal[26]")
1581 assert is_valid_type("None")
1582 assert is_valid_type("Literal[26]")
1583 assert is_valid_type("Literal[0x1A]")
1582 assert is_valid_type("Literal[26]")
1583 assert is_valid_type("Literal[0x1A]")
1584 assert is_valid_type('Literal["hello world"]')
1583 assert is_valid_type("Literal[0x1A]")
1584 assert is_valid_type('Literal["hello world"]')
1585 assert is_valid_type('Literal[b"hello world"]')
1584 assert is_valid_type('Literal["hello world"]')
1585 assert is_valid_type('Literal[b"hello world"]')
1586 assert is_valid_type('Literal[u"hello world"]')
1585 assert is_valid_type('Literal[b"hello world"]')
1586 assert is_valid_type('Literal[u"hello world"]')
1587 assert is_valid_type("Literal[True]")
1586 assert is_valid_type('Literal[u"hello world"]')
1587 assert is_valid_type("Literal[True]")
1588 assert is_valid_type("Literal[Color.RED]")
1587 assert is_valid_type("Literal[True]")
1588 assert is_valid_type("Literal[Color.RED]")
1589 assert is_valid_type("Literal[None]")
1588 assert is_valid_type("Literal[Color.RED]")
1589 assert is_valid_type("Literal[None]")
1590 assert is_valid_type("str | int")
1589 assert is_valid_type("Literal[None]")
1590 assert is_valid_type("str | int")
1591 assert is_valid_type("dict[str, int] | int")
1590 assert is_valid_type("str | int")
1591 assert is_valid_type("dict[str, int] | int")
1592 assert is_valid_type("tuple[str, ...]")
1591 assert is_valid_type("dict[str, int] | int")
1592 assert is_valid_type("tuple[str, ...]")
1593 assert is_valid_type(
1592 assert is_valid_type("tuple[str, ...]")
1593 assert is_valid_type(
1594 'Literal[26, 0x1A, "hello world", b"hello world", u"hello world", True, Color.RED, None]'
1595 )
1596 assert not is_valid_type("foo-bar")
1595 )
1596 assert not is_valid_type("foo-bar")
1597 assert not is_valid_type("x->y")
1596 assert not is_valid_type("foo-bar")
1597 assert not is_valid_type("x->y")
1598 assert not is_valid_type("True")
1597 assert not is_valid_type("x->y")
1598 assert not is_valid_type("True")
1599 assert not is_valid_type("False")
1598 assert not is_valid_type("True")
1599 assert not is_valid_type("False")
1600 assert not is_valid_type("x,y")
1599 assert not is_valid_type("False")
1600 assert not is_valid_type("x,y")
1601 assert not is_valid_type("x, y")
1600 assert not is_valid_type("x,y")
1601 assert not is_valid_type("x, y")
1602
1607 p = m.get_package_properties("inspect")
1608 assert p is not None
1609 assert p.name == "inspect"
1608 assert p is not None 1609 assert p.name == "inspect" 1610 assert p.file
1609 assert p.name == "inspect" 1610 assert p.file 1611 assert p.path is None
1610 assert p.file 1611 assert p.path is None 1612 assert p.is_c_module is False
1611 assert p.path is None 1612 assert p.is_c_module is False 1613 assert p.subpackages == []
1612 assert p.is_c_module is False 1613 assert p.subpackages == [] 1614
1617 p = m.get_package_properties("unittest")
1618 assert p is not None
1619 assert p.name == "unittest"
1618 assert p is not None 1619 assert p.name == "unittest" 1620 assert p.file
1619 assert p.name == "unittest" 1620 assert p.file 1621 assert p.path
1620 assert p.file 1621 assert p.path 1622 assert p.is_c_module is False
1621 assert p.path 1622 assert p.is_c_module is False 1623 assert p.subpackages
1622 assert p.is_c_module is False
1623 assert p.subpackages
1624 assert all(sub.startswith("unittest.") for sub in p.subpackages)
1623 assert p.subpackages
1624 assert all(sub.startswith("unittest.") for sub in p.subpackages)
1625
1628 p = m.get_package_properties("_socket")
1629 assert p is not None
1630 assert p.name == "_socket"
1629 assert p is not None 1630 assert p.name == "_socket" 1631 assert p.path is None
1630 assert p.name == "_socket" 1631 assert p.path is None 1632 assert p.is_c_module is True
1631 assert p.path is None 1632 assert p.is_c_module is True 1633 assert p.subpackages == []
1632 assert p.is_c_module is True 1633 assert p.subpackages == [] 1634
1638 m.get_package_properties("foobar-non-existent")
1639 assert str(e.exception) == "No module named 'foobar-non-existent'"
1640
14 def test_is_legacy_bundled_packages(self) -> None:
15 assert not is_module_from_legacy_bundled_package("foobar_asdf")
16 assert not is_module_from_legacy_bundled_package("PIL")
15 assert not is_module_from_legacy_bundled_package("foobar_asdf")
16 assert not is_module_from_legacy_bundled_package("PIL")
17 assert is_module_from_legacy_bundled_package("pycurl")
16 assert not is_module_from_legacy_bundled_package("PIL")
17 assert is_module_from_legacy_bundled_package("pycurl")
18 assert is_module_from_legacy_bundled_package("dateparser")
17 assert is_module_from_legacy_bundled_package("pycurl")
18 assert is_module_from_legacy_bundled_package("dateparser")
19
20 def test_stub_distribution_name(self) -> None:
21 assert stub_distribution_name("foobar_asdf") is None
22 assert stub_distribution_name("pycurl") == "types-pycurl"
21 assert stub_distribution_name("foobar_asdf") is None
22 assert stub_distribution_name("pycurl") == "types-pycurl"
23 assert stub_distribution_name("psutil") == "types-psutil"
22 assert stub_distribution_name("pycurl") == "types-pycurl"
23 assert stub_distribution_name("psutil") == "types-psutil"
24 assert stub_distribution_name("sassutils") == "types-libsass"
23 assert stub_distribution_name("psutil") == "types-psutil"
24 assert stub_distribution_name("sassutils") == "types-libsass"
25 assert stub_distribution_name("google.cloud.ndb") == "types-google-cloud-ndb"
24 assert stub_distribution_name("sassutils") == "types-libsass"
25 assert stub_distribution_name("google.cloud.ndb") == "types-google-cloud-ndb"
26 assert stub_distribution_name("google.cloud.ndb.submodule") == "types-google-cloud-ndb"
25 assert stub_distribution_name("google.cloud.ndb") == "types-google-cloud-ndb"
26 assert stub_distribution_name("google.cloud.ndb.submodule") == "types-google-cloud-ndb"
27 assert stub_distribution_name("google.cloud.unknown") is None
26 assert stub_distribution_name("google.cloud.ndb.submodule") == "types-google-cloud-ndb"
27 assert stub_distribution_name("google.cloud.unknown") is None
28 assert stub_distribution_name("google.protobuf") == "types-protobuf"
27 assert stub_distribution_name("google.cloud.unknown") is None
28 assert stub_distribution_name("google.protobuf") == "types-protobuf"
29 assert stub_distribution_name("google.protobuf.submodule") == "types-protobuf"
28 assert stub_distribution_name("google.protobuf") == "types-protobuf"
29 assert stub_distribution_name("google.protobuf.submodule") == "types-protobuf"
30 assert stub_distribution_name("google") is None
29 assert stub_distribution_name("google.protobuf.submodule") == "types-protobuf"
30 assert stub_distribution_name("google") is None
31
34 for top_level_module in packages: 35 assert "." not in top_level_module
239 expected_error = f"{TEST_MODULE_NAME}.{expected_error}"
240 assert expected_error not in expected_errors, (
241 "collect_cases merges cases into a single stubtest invocation; we already "
242 "expect an error for {}".format(expected_error)
243 )
244 expected_errors.add(expected_error)
257 ) 258 assert actual_errors == expected_errors, output 259
2683 ) 2684 assert output == expected 2685
2696 ) 2697 assert output == expected 2698
2702 ) 2703 assert output == "Success: no issues found in 1 module\n" 2704
2705 output = run_stubtest(stub="", runtime="def f(): pass", options=["--ignore-missing-stub"]) 2706 assert output == "Success: no issues found in 1 module\n" 2707
2710 ) 2711 assert output == "Success: no issues found in 1 module\n" 2712
2724 ) 2725 assert output == "Success: no issues found in 1 module\n" 2726
2728 output = run_stubtest(stub="", runtime="", options=["--allowlist", allowlist.name])
2729 assert output == (
2730 f"note: unused allowlist entry {TEST_MODULE_NAME}.bad\n"
2731 "Found 1 error (checked 1 module)\n"
2732 )
2733
2738 ) 2739 assert output == "Success: no issues found in 1 module\n" 2740
2767 )
2768 assert output == (
2769 f"note: unused allowlist entry unused.*\n{TEST_MODULE_NAME}.also_bad\n"
2770 )
2771 finally:
2775 output = run_stubtest(stub="+", runtime="", options=[])
2776 assert output == (
2777 "error: not checking stubs due to failed mypy compile:\n{}.pyi:1: "
2778 "error: Invalid syntax [syntax]\n".format(TEST_MODULE_NAME)
2779 )
2780
2781 output = run_stubtest(stub="def f(): ...\ndef f(): ...", runtime="", options=[])
2782 assert output == (
2783 "error: not checking stubs due to mypy build errors:\n{}.pyi:2: "
2784 'error: Name "f" already defined on line 1 [no-redef]\n'.format(TEST_MODULE_NAME)
2785 )
2786
2790 test_stubs(parse_options(["not_a_module"])) 2791 assert remove_color_code(output.getvalue()) == ( 2792 "error: not_a_module failed to find stubs\n" 2793 "Stub:\nMISSING\nRuntime:\nN/A\n\n" 2794 "Found 1 error (checked 1 module)\n" 2795 ) 2796
2806 output_str = remove_color_code(output.getvalue()) 2807 assert output_str == "Success: no issues found in 1 module\n" 2808
2810 stdlib = mypy.stubtest.get_typeshed_stdlib_modules(None, (3, 7)) 2811 assert "builtins" in stdlib 2812 assert "os" in stdlib
2811 assert "builtins" in stdlib 2812 assert "os" in stdlib 2813 assert "os.path" in stdlib
2812 assert "os" in stdlib 2813 assert "os.path" in stdlib 2814 assert "asyncio" in stdlib
2813 assert "os.path" in stdlib 2814 assert "asyncio" in stdlib 2815 assert "graphlib" not in stdlib
2814 assert "asyncio" in stdlib 2815 assert "graphlib" not in stdlib 2816 assert "formatter" in stdlib
2815 assert "graphlib" not in stdlib 2816 assert "formatter" in stdlib 2817 assert "contextvars" in stdlib # 3.7+
2816 assert "formatter" in stdlib 2817 assert "contextvars" in stdlib # 3.7+ 2818 assert "importlib.metadata" not in stdlib
2817 assert "contextvars" in stdlib # 3.7+ 2818 assert "importlib.metadata" not in stdlib 2819
2820 stdlib = mypy.stubtest.get_typeshed_stdlib_modules(None, (3, 10)) 2821 assert "graphlib" in stdlib 2822 assert "formatter" not in stdlib
2821 assert "graphlib" in stdlib 2822 assert "formatter" not in stdlib 2823 assert "importlib.metadata" in stdlib
2822 assert "formatter" not in stdlib 2823 assert "importlib.metadata" in stdlib 2824
2828 2829 assert ( 2830 str(mypy.stubtest.Signature.from_inspect_signature(inspect.signature(f))) 2831 == "def (a, b, *, c, d = ..., **kwargs)" 2832 ) 2833
2835 sig = mypy.stubtest.safe_inspect_signature(bytes.hex) 2836 assert sig is not None 2837 assert (
2836 assert sig is not None 2837 assert ( 2838 str(mypy.stubtest.Signature.from_inspect_signature(sig)) 2839 == "def (self, sep = ..., bytes_per_sep = ...)" 2840 ) 2841
2853 stub = result.files["__main__"].names["myfunction"].node 2854 assert isinstance(stub, nodes.OverloadedFuncDef) 2855 sig = mypy.stubtest.Signature.from_overloadedfuncdef(stub)
2856 if sys.version_info >= (3, 10): 2857 assert str(sig) == "def (arg: builtins.int | builtins.str)" 2858 else:
2858 else: 2859 assert str(sig) == "def (arg: Union[builtins.int, builtins.str])" 2860
2865 output = run_stubtest(stub=stub, runtime=runtime, options=[])
2866 assert output == (
2867 f"error: {TEST_MODULE_NAME}.temp variable differs from runtime type Literal[5]\n"
2868 f"Stub: in file {TEST_MODULE_NAME}.pyi:2\n_decimal.Decimal\nRuntime:\n5\n\n"
2869 "Found 1 error (checked 1 module)\n"
2870 )
2871 output = run_stubtest(stub=stub, runtime=runtime, options=[], config_file=config_file)
2871 output = run_stubtest(stub=stub, runtime=runtime, options=[], config_file=config_file) 2872 assert output == "Success: no issues found in 1 module\n" 2873
2877 output = run_stubtest(stub=stub, runtime=runtime, options=[]) 2878 assert output == ( 2879 "error: not checking stubs due to mypy build errors:\n" 2880 'test_module.pyi:1: error: Name "SOME_GLOBAL_CONST" is not defined [name-defined]\n' 2881 ) 2882
2884 output = run_stubtest(stub=stub, runtime=runtime, options=[], config_file=config_file) 2885 assert output == "Success: no issues found in 1 module\n" 2886
2893 ) 2894 assert output == "Success: no issues found in 1 module\n" 2895 assert outerr == (
2894 assert output == "Success: no issues found in 1 module\n" 2895 assert outerr == ( 2896 "test_module_config.ini: [mypy]: disable_error_code: " 2897 "Invalid error code(s): not-a-valid-name\n" 2898 ) 2899
2904 output = run_stubtest(stub=stub, runtime=runtime, options=[], config_file=config_file) 2905 assert output == ( 2906 "warning: Warning: Unpack is already enabled by default\n" 2907 "Success: no issues found in 1 module\n" 2908 ) 2909
2917 test_stubs(parse_options([])) 2918 assert remove_color_code(output.getvalue()) == "error: no modules to check\n" 2919
2923 test_stubs(parse_options(["--check-typeshed", "some_module"])) 2924 assert remove_color_code(output.getvalue()) == ( 2925 "error: cannot pass both --check-typeshed and a list of modules\n" 2926 )
290 def assert_subtype(self, s: Type, t: Type) -> None:
291 assert is_subtype(s, t), f"{s} not subtype of {t}"
292
293 def assert_not_subtype(self, s: Type, t: Type) -> None:
294 assert not is_subtype(s, t), f"{s} subtype of {t}"
295
196 A, target = self.fx.def_alias_1(self.fx.a) 197 assert get_proper_type(A) == target 198 assert get_proper_type(target) == target
197 assert get_proper_type(A) == target 198 assert get_proper_type(target) == target 199
200 A, target = self.fx.def_alias_2(self.fx.a) 201 assert get_proper_type(A) == target 202 assert get_proper_type(target) == target
201 assert get_proper_type(A) == target 202 assert get_proper_type(target) == target 203
205 A, _ = self.fx.def_alias_1(self.fx.a) 206 assert A.expand_all_if_possible() is None 207 A, _ = self.fx.def_alias_2(self.fx.a)
207 A, _ = self.fx.def_alias_2(self.fx.a) 208 assert A.expand_all_if_possible() is None 209
211 C = self.fx.non_rec_alias(TupleType([B, B], Instance(self.fx.std_tuplei, [B]))) 212 assert C.expand_all_if_possible() == TupleType( 213 [self.fx.a, self.fx.a], Instance(self.fx.std_tuplei, [self.fx.a]) 214 ) 215
221 NA = self.fx.non_rec_alias(Instance(self.fx.gi, [T]), [T], [A]) 222 assert not NA.is_recursive 223 assert has_recursive_types(NA)
222 assert not NA.is_recursive 223 assert has_recursive_types(NA) 224
229 modules = visitor.modules
230 assert modules == {"__main__", "builtins"}
231
235 modules = visitor.modules
236 assert modules == {"__main__", "builtins"}
237
348 fx = self.fx 349 assert is_more_precise(fx.b, fx.a) 350 assert is_more_precise(fx.b, fx.b)
349 assert is_more_precise(fx.b, fx.a) 350 assert is_more_precise(fx.b, fx.b) 351 assert is_more_precise(fx.b, fx.b)
350 assert is_more_precise(fx.b, fx.b) 351 assert is_more_precise(fx.b, fx.b) 352 assert is_more_precise(fx.b, fx.anyt)
351 assert is_more_precise(fx.b, fx.b) 352 assert is_more_precise(fx.b, fx.anyt) 353 assert is_more_precise(self.tuple(fx.b, fx.a), self.tuple(fx.b, fx.a))
352 assert is_more_precise(fx.b, fx.anyt) 353 assert is_more_precise(self.tuple(fx.b, fx.a), self.tuple(fx.b, fx.a)) 354 assert is_more_precise(self.tuple(fx.b, fx.b), self.tuple(fx.b, fx.a))
353 assert is_more_precise(self.tuple(fx.b, fx.a), self.tuple(fx.b, fx.a)) 354 assert is_more_precise(self.tuple(fx.b, fx.b), self.tuple(fx.b, fx.a)) 355
355 356 assert not is_more_precise(fx.a, fx.b) 357 assert not is_more_precise(fx.anyt, fx.b)
356 assert not is_more_precise(fx.a, fx.b) 357 assert not is_more_precise(fx.anyt, fx.b) 358
363 364 assert is_proper_subtype(fx.a, fx.a) 365 assert is_proper_subtype(fx.b, fx.a)
364 assert is_proper_subtype(fx.a, fx.a) 365 assert is_proper_subtype(fx.b, fx.a) 366 assert is_proper_subtype(fx.b, fx.o)
365 assert is_proper_subtype(fx.b, fx.a) 366 assert is_proper_subtype(fx.b, fx.o) 367 assert is_proper_subtype(fx.b, fx.o)
366 assert is_proper_subtype(fx.b, fx.o) 367 assert is_proper_subtype(fx.b, fx.o) 368
368 369 assert not is_proper_subtype(fx.a, fx.b) 370 assert not is_proper_subtype(fx.o, fx.b)
369 assert not is_proper_subtype(fx.a, fx.b) 370 assert not is_proper_subtype(fx.o, fx.b) 371
371 372 assert is_proper_subtype(fx.anyt, fx.anyt) 373 assert not is_proper_subtype(fx.a, fx.anyt)
372 assert is_proper_subtype(fx.anyt, fx.anyt) 373 assert not is_proper_subtype(fx.a, fx.anyt) 374 assert not is_proper_subtype(fx.anyt, fx.a)
373 assert not is_proper_subtype(fx.a, fx.anyt) 374 assert not is_proper_subtype(fx.anyt, fx.a) 375
375 376 assert is_proper_subtype(fx.ga, fx.ga) 377 assert is_proper_subtype(fx.gdyn, fx.gdyn)
376 assert is_proper_subtype(fx.ga, fx.ga) 377 assert is_proper_subtype(fx.gdyn, fx.gdyn) 378 assert not is_proper_subtype(fx.ga, fx.gdyn)
377 assert is_proper_subtype(fx.gdyn, fx.gdyn) 378 assert not is_proper_subtype(fx.ga, fx.gdyn) 379 assert not is_proper_subtype(fx.gdyn, fx.ga)
378 assert not is_proper_subtype(fx.ga, fx.gdyn) 379 assert not is_proper_subtype(fx.gdyn, fx.ga) 380
380 381 assert is_proper_subtype(fx.t, fx.t) 382 assert not is_proper_subtype(fx.t, fx.s)
381 assert is_proper_subtype(fx.t, fx.t) 382 assert not is_proper_subtype(fx.t, fx.s) 383
383 384 assert is_proper_subtype(fx.a, UnionType([fx.a, fx.b])) 385 assert is_proper_subtype(UnionType([fx.a, fx.b]), UnionType([fx.a, fx.b, fx.c]))
384 assert is_proper_subtype(fx.a, UnionType([fx.a, fx.b])) 385 assert is_proper_subtype(UnionType([fx.a, fx.b]), UnionType([fx.a, fx.b, fx.c])) 386 assert not is_proper_subtype(UnionType([fx.a, fx.b]), UnionType([fx.b, fx.c]))
385 assert is_proper_subtype(UnionType([fx.a, fx.b]), UnionType([fx.a, fx.b, fx.c])) 386 assert not is_proper_subtype(UnionType([fx.a, fx.b]), UnionType([fx.b, fx.c])) 387
390 391 assert is_proper_subtype(fx_co.gsab, fx_co.gb) 392 assert is_proper_subtype(fx_co.gsab, fx_co.ga)
391 assert is_proper_subtype(fx_co.gsab, fx_co.gb) 392 assert is_proper_subtype(fx_co.gsab, fx_co.ga) 393 assert not is_proper_subtype(fx_co.gsaa, fx_co.gb)
392 assert is_proper_subtype(fx_co.gsab, fx_co.ga) 393 assert not is_proper_subtype(fx_co.gsaa, fx_co.gb) 394 assert is_proper_subtype(fx_co.gb, fx_co.ga)
393 assert not is_proper_subtype(fx_co.gsaa, fx_co.gb) 394 assert is_proper_subtype(fx_co.gb, fx_co.ga) 395 assert not is_proper_subtype(fx_co.ga, fx_co.gb)
394 assert is_proper_subtype(fx_co.gb, fx_co.ga) 395 assert not is_proper_subtype(fx_co.ga, fx_co.gb) 396
399 400 assert is_proper_subtype(fx_contra.gsab, fx_contra.gb) 401 assert not is_proper_subtype(fx_contra.gsab, fx_contra.ga)
400 assert is_proper_subtype(fx_contra.gsab, fx_contra.gb) 401 assert not is_proper_subtype(fx_contra.gsab, fx_contra.ga) 402 assert is_proper_subtype(fx_contra.gsaa, fx_contra.gb)
401 assert not is_proper_subtype(fx_contra.gsab, fx_contra.ga) 402 assert is_proper_subtype(fx_contra.gsaa, fx_contra.gb) 403 assert not is_proper_subtype(fx_contra.gb, fx_contra.ga)
402 assert is_proper_subtype(fx_contra.gsaa, fx_contra.gb) 403 assert not is_proper_subtype(fx_contra.gb, fx_contra.ga) 404 assert is_proper_subtype(fx_contra.ga, fx_contra.gb)
403 assert not is_proper_subtype(fx_contra.gb, fx_contra.ga) 404 assert is_proper_subtype(fx_contra.ga, fx_contra.gb) 405
408 409 assert is_proper_subtype(fx.gsab, fx.gb) 410 assert not is_proper_subtype(fx.gsab, fx.ga)
409 assert is_proper_subtype(fx.gsab, fx.gb) 410 assert not is_proper_subtype(fx.gsab, fx.ga) 411 assert not is_proper_subtype(fx.gsaa, fx.gb)
410 assert not is_proper_subtype(fx.gsab, fx.ga) 411 assert not is_proper_subtype(fx.gsaa, fx.gb) 412 assert not is_proper_subtype(fx.gb, fx.ga)
411 assert not is_proper_subtype(fx.gsaa, fx.gb) 412 assert not is_proper_subtype(fx.gb, fx.ga) 413 assert not is_proper_subtype(fx.ga, fx.gb)
412 assert not is_proper_subtype(fx.gb, fx.ga) 413 assert not is_proper_subtype(fx.ga, fx.gb) 414
421 422 assert is_proper_subtype(lit1, fx.a) 423 assert not is_proper_subtype(lit1, fx.d)
422 assert is_proper_subtype(lit1, fx.a) 423 assert not is_proper_subtype(lit1, fx.d) 424 assert not is_proper_subtype(fx.a, lit1)
423 assert not is_proper_subtype(lit1, fx.d) 424 assert not is_proper_subtype(fx.a, lit1) 425 assert is_proper_subtype(fx.uninhabited, lit1)
424 assert not is_proper_subtype(fx.a, lit1) 425 assert is_proper_subtype(fx.uninhabited, lit1) 426 assert not is_proper_subtype(lit1, fx.uninhabited)
425 assert is_proper_subtype(fx.uninhabited, lit1) 426 assert not is_proper_subtype(lit1, fx.uninhabited) 427 assert is_proper_subtype(lit1, lit1)
426 assert not is_proper_subtype(lit1, fx.uninhabited) 427 assert is_proper_subtype(lit1, lit1) 428 assert not is_proper_subtype(lit1, lit2)
427 assert is_proper_subtype(lit1, lit1) 428 assert not is_proper_subtype(lit1, lit2) 429 assert not is_proper_subtype(lit2, lit3)
428 assert not is_proper_subtype(lit1, lit2) 429 assert not is_proper_subtype(lit2, lit3) 430
430 431 assert is_subtype(lit1, fx.a) 432 assert not is_subtype(lit1, fx.d)
431 assert is_subtype(lit1, fx.a) 432 assert not is_subtype(lit1, fx.d) 433 assert not is_subtype(fx.a, lit1)
432 assert not is_subtype(lit1, fx.d) 433 assert not is_subtype(fx.a, lit1) 434 assert is_subtype(fx.uninhabited, lit1)
433 assert not is_subtype(fx.a, lit1) 434 assert is_subtype(fx.uninhabited, lit1) 435 assert not is_subtype(lit1, fx.uninhabited)
434 assert is_subtype(fx.uninhabited, lit1) 435 assert not is_subtype(lit1, fx.uninhabited) 436 assert is_subtype(lit1, lit1)
435 assert not is_subtype(lit1, fx.uninhabited) 436 assert is_subtype(lit1, lit1) 437 assert not is_subtype(lit1, lit2)
436 assert is_subtype(lit1, lit1) 437 assert not is_subtype(lit1, lit2) 438 assert not is_subtype(lit2, lit3)
437 assert not is_subtype(lit1, lit2) 438 assert not is_subtype(lit2, lit3) 439
439 440 assert not is_proper_subtype(lit1, fx.anyt) 441 assert not is_proper_subtype(fx.anyt, lit1)
440 assert not is_proper_subtype(lit1, fx.anyt) 441 assert not is_proper_subtype(fx.anyt, lit1) 442
442 443 assert is_subtype(lit1, fx.anyt) 444 assert is_subtype(fx.anyt, lit1)
443 assert is_subtype(lit1, fx.anyt) 444 assert is_subtype(fx.anyt, lit1) 445
448 AA1, _ = self.fx.def_alias_1(self.fx.a) 449 assert is_subtype(A1, AA1) 450 assert is_subtype(AA1, A1)
449 assert is_subtype(A1, AA1) 450 assert is_subtype(AA1, A1) 451
453 AA2, _ = self.fx.def_alias_2(self.fx.a) 454 assert is_subtype(A2, AA2) 455 assert is_subtype(AA2, A2)
454 assert is_subtype(A2, AA2) 455 assert is_subtype(AA2, A2) 456
458 B2, _ = self.fx.def_alias_2(self.fx.b) 459 assert is_subtype(B1, A1) 460 assert is_subtype(B2, A2)
459 assert is_subtype(B1, A1) 460 assert is_subtype(B2, A2) 461 assert not is_subtype(A1, B1)
460 assert is_subtype(B2, A2) 461 assert not is_subtype(A1, B1) 462 assert not is_subtype(A2, B2)
461 assert not is_subtype(A1, B1) 462 assert not is_subtype(A2, B2) 463
463 464 assert not is_subtype(A2, A1) 465 assert is_subtype(A1, A2)
464 assert not is_subtype(A2, A1) 465 assert is_subtype(A1, A2) 466
470 tuple_type = self.tuple() 471 assert tuple_type.can_be_false 472 assert not tuple_type.can_be_true
471 assert tuple_type.can_be_false 472 assert not tuple_type.can_be_true 473
475 tuple_type = self.tuple(AnyType(TypeOfAny.special_form), AnyType(TypeOfAny.special_form)) 476 assert tuple_type.can_be_true 477 assert not tuple_type.can_be_false
476 assert tuple_type.can_be_true 477 assert not tuple_type.can_be_false 478
480 union_type = UnionType([self.fx.a, self.tuple()]) 481 assert union_type.can_be_true 482
484 union_type = UnionType([self.tuple(), self.tuple()]) 485 assert not union_type.can_be_true 486
488 union_type = UnionType([self.fx.a, self.tuple()]) 489 assert union_type.can_be_false 490
492 union_type = UnionType([self.tuple(self.fx.a), self.tuple(self.fx.d)]) 493 assert not union_type.can_be_false 494
503 to = true_only(always_true) 504 assert always_true is to 505
508 assert_equal(str(to), "A") 509 assert to.can_be_true 510 assert not to.can_be_false
509 assert to.can_be_true 510 assert not to.can_be_false 511 assert_type(Instance, to)
512 # The original class still can be false 513 assert self.fx.a.can_be_false 514
520 to = true_only(union_type) 521 assert isinstance(to, UnionType) 522 assert_equal(len(to.items), 2)
522 assert_equal(len(to.items), 2) 523 assert to.items[0].can_be_true 524 assert not to.items[0].can_be_false
523 assert to.items[0].can_be_true 524 assert not to.items[0].can_be_false 525 assert to.items[1] is tup_type
524 assert not to.items[0].can_be_false 525 assert to.items[1] is tup_type 526
542 fo = false_only(always_false) 543 assert always_false is fo 544
547 assert_equal(str(fo), "A") 548 assert not fo.can_be_true 549 assert fo.can_be_false
548 assert not fo.can_be_true 549 assert fo.can_be_false 550 assert_type(Instance, fo)
551 # The original class still can be true 552 assert self.fx.a.can_be_true 553
563 fo = false_only(union_type) 564 assert isinstance(fo, UnionType) 565 assert_equal(len(fo.items), 2)
565 assert_equal(len(fo.items), 2) 566 assert not fo.items[0].can_be_true 567 assert fo.items[0].can_be_false
566 assert not fo.items[0].can_be_true 567 assert fo.items[0].can_be_false 568 assert fo.items[1] is tup_type
567 assert fo.items[0].can_be_false 568 assert fo.items[1] is tup_type 569
818 u = make_simplified_union([true_a, false_o]) 819 assert u.can_be_true 820 assert u.can_be_false
819 assert u.can_be_true 820 assert u.can_be_false 821
826 j = join_types(true_any, false_o) 827 assert j.can_be_true 828 assert j.can_be_false
827 assert j.can_be_true 828 assert j.can_be_false 829
964 j = join_types(t1, t1) 965 assert isinstance(j, CallableType) 966 assert j.is_type_obj()
965 assert isinstance(j, CallableType) 966 assert j.is_type_obj() 967
1078 assert_equal(actual, expected, f"join({s}, {t}) == {{}} ({{}} expected)")
1079 assert is_subtype(s, result), f"{s} not subtype of {result}"
1080 assert is_subtype(t, result), f"{t} not subtype of {result}"
1079 assert is_subtype(s, result), f"{s} not subtype of {result}"
1080 assert is_subtype(t, result), f"{t} not subtype of {result}"
1081
1303 1304 assert is_same_type(lit1, narrow_declared_type(lit1, a)) 1305 assert is_same_type(lit2, narrow_declared_type(lit2, a))
1304 assert is_same_type(lit1, narrow_declared_type(lit1, a)) 1305 assert is_same_type(lit2, narrow_declared_type(lit2, a)) 1306
1351 assert_equal(actual, expected, f"meet({s}, {t}) == {{}} ({{}} expected)")
1352 assert is_subtype(result, s), f"{result} not subtype of {s}"
1353 assert is_subtype(result, t), f"{result} not subtype of {t}"
1352 assert is_subtype(result, s), f"{result} not subtype of {s}"
1353 assert is_subtype(result, t), f"{result} not subtype of {t}"
1354
1422 t = UnionType.make_union([self.fx.a, self.fx.a]) 1423 assert remove_instance_last_known_values(t) == self.fx.a 1424 t = UnionType.make_union([self.fx.a, self.fx.a, self.fx.b])
1434 t = UnionType.make_union([self.fx.lit1_inst, self.fx.lit2_inst, self.fx.lit4_inst]) 1435 assert remove_instance_last_known_values(t) == self.fx.a 1436 t = UnionType.make_union(
1446 t2 = remove_instance_last_known_values(t) 1447 assert type(t2) is UnionType 1448 assert t2.items == expected
1447 assert type(t2) is UnionType 1448 assert t2.items == expected 1449
1544 c = find_shallow_matching_overload_item(ov, call) 1545 assert c in ov.items 1546 assert ov.items.index(c) == expected_index
1545 assert c in ov.items 1546 assert ov.items.index(c) == expected_index 1547
1593 get_proper_type_count -= len(re.findall(r"get_proper_type\(\)", code)) 1594 assert get_proper_type_count == self.ALLOWED_GET_PROPER_TYPES
17 with mock.patch.dict(os.environ, values=mock_environ, clear=True): 18 assert get_terminal_width() == 80 19
20 def test_parse_location_windows(self) -> None: 21 assert parse_location(r"C:\test.py:1:1") == (r"C:\test.py", [1, 1]) 22 assert parse_location(r"C:\test.py:1:1:1:1") == (r"C:\test.py", [1, 1, 1, 1])
21 assert parse_location(r"C:\test.py:1:1") == (r"C:\test.py", [1, 1]) 22 assert parse_location(r"C:\test.py:1:1:1:1") == (r"C:\test.py", [1, 1, 1, 1]) 23
41 ) 42 assert result == expected 43
63 ) 64 assert result == expected 65
90 ) 91 assert result == expected 92
110 ) 111 assert result == expected
15 collector = testcase.parent 16 assert isinstance(collector, DataFileCollector) 17 for fix in _iter_fixes(testcase, actual, incremental_step=incremental_step):
61 def type(self, type: Type) -> Type: 62 assert type is not None 63 return type
296 if o.kind == REVEAL_TYPE: 297 assert o.expr is not None 298 o.expr.accept(self)
146 def visit_mypy_file(self, node: MypyFile) -> MypyFile: 147 assert self.test_only, "This visitor should not be used for whole files." 148 # NOTE: The 'names' and 'imports' instance variables will be empty!
248 new_type = self.optional_type(node.type) 249 assert isinstance(new_type, ProperType) 250 new.type = new_type
431 class_ref = p.class_ref.accept(self) 432 assert isinstance(class_ref, RefExpr) 433 return ClassPattern(
547 if node.kind == REVEAL_TYPE: 548 assert node.expr is not None 549 return RevealExpr(kind=REVEAL_TYPE, expr=self.expr(node.expr))
555 call = self.expr(node.call) 556 assert isinstance(call, CallExpr) 557 new = SuperExpr(node.name, call)
702 new = node.accept(self) 703 assert isinstance(new, MypyFile) 704 new.set_line(node)
708 new = expr.accept(self) 709 assert isinstance(new, Expression) 710 new.set_line(expr)
714 new = stmt.accept(self) 715 assert isinstance(new, Statement) 716 new.set_line(stmt)
720 new = pattern.accept(self) 721 assert isinstance(new, Pattern) 722 new.set_line(pattern)
145 else: 146 assert False 147 self.scope[tvar_expr.fullname] = tvar_def
154 fullname = item.fullname if isinstance(item, SymbolTableNode) else item 155 assert fullname 156 if fullname in self.scope:
226 raw_last_known_value = t.last_known_value.accept(self) 227 assert isinstance(raw_last_known_value, LiteralType) # type: ignore[misc] 228 last_known_value = raw_last_known_value
290 fallback = t.fallback.accept(self) 291 assert isinstance(fallback, Instance) # type: ignore[misc] 292 return LiteralType(value=t.value, fallback=fallback, line=t.line, column=t.column)
325 new = item.accept(self) 326 assert isinstance(new, CallableType) # type: ignore[misc] 327 items.append(new)
481 else: 482 assert strategy == ALL_STRATEGY 483 self.default = True
359 return AnyType(TypeOfAny.from_error) 360 assert isinstance(tvar_def, ParamSpecType) 361 if len(t.args) > 0:
398 if isinstance(sym.node, TypeVarExpr) and tvar_def is not None: 399 assert isinstance(tvar_def, TypeVarType) 400 if len(t.args) > 0:
431 return AnyType(TypeOfAny.from_error) 432 assert isinstance(tvar_def, TypeVarTupleType) 433 if not self.allow_type_var_tuple:
920 if name is None: 921 assert sym.node is not None 922 name = sym.node.name
1002 elif unbound_tvar: 1003 assert isinstance(sym.node, TypeVarLikeExpr) 1004 if sym.node.is_new_style:
1045 # This type should exist only temporarily during type inference 1046 assert False, "Internal error: Unexpected erased type" 1047
1244 else: 1245 assert False, kind 1246 return tvar_name, make_paramspec(
1389 def visit_partial_type(self, t: PartialType) -> Type: 1390 assert False, "Internal error: Unexpected partial type" 1391
1417 # TODO: Handle non-TypeInfo 1418 assert isinstance(n.node, TypeInfo) 1419 return self.analyze_type_with_type_info(n.node, t.args, t, False)
1593 return AnyType(TypeOfAny.from_error) 1594 assert isinstance(ret, CallableType) 1595 return ret.accept(self)
1627 else: 1628 assert found.fullname is not None 1629 kind = ARG_KINDS_BY_CONSTRUCTOR[found.fullname]
1700 ): 1701 assert arg.original_str_fallback is not None 1702 return [
1757 fallback = self.named_type(arg.base_type_name) 1758 assert isinstance(fallback, Instance) 1759 return [LiteralType(arg.literal_value, fallback, line=arg.line, column=arg.column)]
1832 var_node = self.lookup_qualified(var.name, defn) 1833 assert var_node, "Binding for function type variable not found within function" 1834 var_expr = var_node.node
1834 var_expr = var_node.node 1835 assert isinstance(var_expr, TypeVarLikeExpr) 1836 binding = self.tvar_scope.bind_new(var.name, var_expr)
1954 node = self.lookup_fully_qualified(fullname) 1955 assert isinstance(node.node, TypeInfo) 1956 any_type = AnyType(TypeOfAny.special_form)
1979 if num_unpacks > 1:
1980 assert final_unpack is not None
1981 self.fail("More than one variadic Unpack in a type is not allowed", final_unpack.type)
2029 args = list(t.args) 2030 assert t.type.type_var_tuple_prefix is not None 2031 tvt = t.type.defn.type_vars[t.type.type_var_tuple_prefix]
2031 tvt = t.type.defn.type_vars[t.type.type_var_tuple_prefix] 2032 assert isinstance(tvt, TypeVarTupleType) 2033 args[t.type.type_var_tuple_prefix] = UnpackType(
2086 fixed = expand_type(t, env) 2087 assert isinstance(fixed, Instance) 2088 t.args = fixed.args
2144 if no_args: 2145 assert isinstance(node.target, Instance) # type: ignore[misc] 2146 # Note: this is the only case where we use an eager expansion. See more info about
2212 unpack_arg = args[unpack] 2213 assert isinstance(unpack_arg, UnpackType) 2214 if isinstance(unpack_arg.type, TypeVarTupleType):
2225 typ = TypeAliasType(node, args, ctx.line, ctx.column) 2226 assert typ.alias is not None 2227 # HACK: Implement FlexibleAlias[T, typ] by expanding it to typ here.
2232 exp = get_proper_type(typ) 2233 assert isinstance(exp, Instance) 2234 return exp.args[-1]
2280 fixed = expand_type(t, env) 2281 assert isinstance(fixed, TypeAliasType) 2282 t.args = fixed.args
2284 if used_any_type and disallow_any and node.alias_tvars: 2285 assert fail is not None 2286 if unexpanded_type:
2319 def visit_type_alias_type(self, t: TypeAliasType) -> Type:
2320 assert t.alias is not None, f"Unfixed type alias {t.type_ref}"
2321 if t.alias in self.seen_nodes:
2488 unpack_arg = t.args[unpack] 2489 assert isinstance(unpack_arg, UnpackType) 2490 if isinstance(unpack_arg.type, TypeVarTupleType):
2490 if isinstance(unpack_arg.type, TypeVarTupleType): 2491 assert t.type.type_var_tuple_prefix is not None 2492 assert t.type.type_var_tuple_suffix is not None
2491 assert t.type.type_var_tuple_prefix is not None 2492 assert t.type.type_var_tuple_suffix is not None 2493 exp_prefix = t.type.type_var_tuple_prefix
173 # The two is_valid_constructor() checks ensure this. 174 assert isinstance(new_method.node, (SYMBOL_FUNCBASE_TYPES, Decorator)) 175 assert isinstance(init_method.node, (SYMBOL_FUNCBASE_TYPES, Decorator))
174 assert isinstance(new_method.node, (SYMBOL_FUNCBASE_TYPES, Decorator)) 175 assert isinstance(init_method.node, (SYMBOL_FUNCBASE_TYPES, Decorator)) 176
228 else: 229 assert isinstance(method.type, ProperType) 230 assert isinstance(method.type, FunctionLike) # is_valid_constructor() ensures this
229 assert isinstance(method.type, ProperType) 230 assert isinstance(method.type, FunctionLike) # is_valid_constructor() ensures this 231 t = method.type
294 # Overloaded __init__/__new__. 295 assert isinstance(signature, Overloaded) 296 items: list[CallableType] = []
429 return cast(F, Overloaded(items)) 430 assert isinstance(method, CallableType) 431 func: CallableType = method
836 if func.type: 837 assert isinstance(func.type, FunctionLike) 838 return func.type
845 # TODO: make sure the caller defers if possible. 846 assert isinstance(func, OverloadedFuncDef) 847 any_type = AnyType(TypeOfAny.from_error)
347 """ 348 assert self.alias is not None 349 if self.alias.no_args:
351 # as their target. 352 assert isinstance(self.alias.target, Instance) # type: ignore[misc] 353 return self.alias.target.copy_modified(args=self.args)
362 tvar = self.alias.alias_tvars[prefix]
363 assert isinstance(tvar, TypeVarTupleType)
364 mapping = {tvar.id: TupleType(list(middle), tvar.tuple_fallback)}
379 unrolled = alias.accept(unroller) 380 assert isinstance(unrolled, ProperType) 381 return unrolled, unroller.recursed
403 """ 404 assert self.alias is not None, "Unfixed type alias" 405 is_recursive = self.alias._is_recursive
440 def serialize(self) -> JsonDict:
441 assert self.alias is not None
442 data: JsonDict = {
450 def deserialize(cls, data: JsonDict) -> TypeAliasType: 451 assert data[".class"] == "TypeAliasType" 452 args: list[Type] = []
454 args_list = data["args"] 455 assert isinstance(args_list, list) 456 args = [deserialize_type(arg) for arg in args_list]
463 write_type_list(data, self.args) 464 assert self.alias is not None 465 write_str(data, self.alias.fullname)
670 super().__init__(name, fullname, id, upper_bound, default, line, column) 671 assert values is not None, "No restrictions must be represented by empty list" 672 self.values = values
713 def serialize(self) -> JsonDict:
714 assert not self.id.is_meta_var()
715 return {
728 def deserialize(cls, data: JsonDict) -> TypeVarType: 729 assert data[".class"] == "TypeVarType" 730 return TypeVarType(
865 def serialize(self) -> JsonDict:
866 assert not self.id.is_meta_var()
867 return {
880 def deserialize(cls, data: JsonDict) -> ParamSpecType: 881 assert data[".class"] == "ParamSpecType" 882 return ParamSpecType(
904 def read(cls, data: Buffer) -> ParamSpecType: 905 assert read_tag(data) == PARAMETERS 906 prefix = Parameters.read(data)
945 def serialize(self) -> JsonDict:
946 assert not self.id.is_meta_var()
947 return {
960 def deserialize(cls, data: JsonDict) -> TypeVarTupleType: 961 assert data[".class"] == "TypeVarTupleType" 962 return TypeVarTupleType(
984 def read(cls, data: Buffer) -> TypeVarTupleType: 985 assert read_tag(data) == INSTANCE 986 fallback = Instance.read(data)
1119 def deserialize(cls, data: JsonDict) -> UnboundType: 1120 assert data[".class"] == "UnboundType" 1121 return UnboundType(
1170 def accept(self, visitor: TypeVisitor[T]) -> T: 1171 assert isinstance(visitor, SyntheticTypeVisitor) 1172 ret: T = visitor.visit_callable_argument(self)
1175 def serialize(self) -> JsonDict: 1176 assert False, "Synthetic types don't serialize" 1177
1196 def accept(self, visitor: TypeVisitor[T]) -> T: 1197 assert isinstance(visitor, SyntheticTypeVisitor) 1198 ret: T = visitor.visit_type_list(self)
1201 def serialize(self) -> JsonDict: 1202 assert False, "Synthetic types don't serialize" 1203
1247 def deserialize(cls, data: JsonDict) -> UnpackType: 1248 assert data[".class"] == "UnpackType" 1249 typ = data["type"]
1285 # Only unimported type anys and anys from other anys should have an import name 1286 assert missing_import_name is None or type_of_any in ( 1287 TypeOfAny.from_unimported_type, 1288 TypeOfAny.from_another_any, 1289 ) 1290 # Only Anys that come from another Any can have source_any.
1290 # Only Anys that come from another Any can have source_any. 1291 assert type_of_any != TypeOfAny.from_another_any or source_any is not None 1292 # We should not have chains of Anys.
1292 # We should not have chains of Anys. 1293 assert not self.source_any or self.source_any.type_of_any != TypeOfAny.from_another_any 1294
1338 def deserialize(cls, data: JsonDict) -> AnyType: 1339 assert data[".class"] == "AnyType" 1340 source = data["source_any"]
1355 if read_bool(data): 1356 assert read_tag(data) == ANY_TYPE 1357 source_any = AnyType.read(data)
1404 def deserialize(cls, data: JsonDict) -> UninhabitedType: 1405 assert data[".class"] == "UninhabitedType" 1406 return UninhabitedType()
1443 def deserialize(cls, data: JsonDict) -> NoneType: 1444 assert data[".class"] == "NoneType" 1445 return NoneType()
1497 def deserialize(cls, data: JsonDict) -> DeletedType: 1498 assert data[".class"] == "DeletedType" 1499 return DeletedType(data["source"])
1556 def deserialize(cls, data: JsonDict) -> ExtraAttrs: 1557 assert data[".class"] == "ExtraAttrs" 1558 return ExtraAttrs(
1674 def serialize(self) -> JsonDict | str: 1675 assert self.type is not None 1676 type_ref = self.type.fullname
1694 return inst 1695 assert data[".class"] == "Instance" 1696 args: list[Type] = []
1698 args_list = data["args"] 1699 assert isinstance(args_list, list) 1700 args = [deserialize_type(arg) for arg in args_list]
1725 if read_bool(data): 1726 assert read_tag(data) == LITERAL_TYPE 1727 inst.last_known_value = LiteralType.read(data)
1864 self.arg_names = list(arg_names) 1865 assert len(arg_types) == len(arg_kinds) == len(arg_names) 1866 assert not any(isinstance(t, Parameters) for t in arg_types)
1865 assert len(arg_types) == len(arg_kinds) == len(arg_names) 1866 assert not any(isinstance(t, Parameters) for t in arg_types) 1867 self.min_args = arg_kinds.count(ARG_POS)
1995 def deserialize(cls, data: JsonDict) -> Parameters: 1996 assert data[".class"] == "Parameters" 1997 return Parameters(
2105 super().__init__(line, column) 2106 assert len(arg_types) == len(arg_kinds) == len(arg_names) 2107 self.arg_types = list(arg_types)
2109 if isinstance(t, ParamSpecType): 2110 assert not t.prefix.arg_types 2111 # TODO: should we assert that only ARG_STAR contain ParamSpecType?
2117 self.fallback = fallback 2118 assert not name or "<bound method" not in name 2119 self.name = name
2226 def type_object(self) -> mypy.nodes.TypeInfo: 2227 assert self.is_type_obj() 2228 ret = get_proper_type(self.ret_type)
2234 ret = ret.fallback 2235 assert isinstance(ret, Instance) 2236 return ret.type
2364 if isinstance(p_type, Instance): 2365 assert p_type.type.fullname == "builtins.tuple" 2366 self.arg_types[star_index] = p_type.args[0]
2371 last_type = get_proper_type(self.arg_types[-1]) 2372 assert isinstance(last_type, TypedDictType) 2373 extra_kinds = [
2424 nested_unpack = unpacked.items[unpack_index] 2425 assert isinstance(nested_unpack, UnpackType) 2426 nested_unpacked = get_proper_type(nested_unpack.type)
2432 if isinstance(nested_unpacked, Instance): 2433 assert nested_unpacked.type.fullname == "builtins.tuple" 2434 new_unpack = nested_unpacked.args[0]
2508 def deserialize(cls, data: JsonDict) -> CallableType: 2509 assert data[".class"] == "CallableType" 2510 # The .definition link is set in fixup.py.
2550 def read(cls, data: Buffer) -> CallableType: 2551 assert read_tag(data) == INSTANCE 2552 fallback = Instance.read(data)
2642 def deserialize(cls, data: JsonDict) -> Overloaded: 2643 assert data[".class"] == "Overloaded" 2644 return Overloaded([CallableType.deserialize(t) for t in data["items"]])
2653 for _ in range(read_int(data)): 2654 assert read_tag(data) == CALLABLE_TYPE 2655 items.append(CallableType.read(data))
2746 def deserialize(cls, data: JsonDict) -> TupleType: 2747 assert data[".class"] == "TupleType" 2748 return TupleType(
2761 def read(cls, data: Buffer) -> TupleType: 2762 assert read_tag(data) == INSTANCE 2763 fallback = Instance.read(data)
2786 unpack_index = find_unpack_in_list(self.items) 2787 assert unpack_index is not None 2788 if begin is None and end is None:
2927 def deserialize(cls, data: JsonDict) -> TypedDictType: 2928 assert data[".class"] == "TypedDictType" 2929 return TypedDictType(
2944 def read(cls, data: Buffer) -> TypedDictType: 2945 assert read_tag(data) == INSTANCE 2946 fallback = Instance.read(data)
2960 return self 2961 assert self.fallback.type.typeddict_type is not None 2962 return self.fallback.type.typeddict_type.as_anonymous()
3075 def accept(self, visitor: TypeVisitor[T]) -> T: 3076 assert isinstance(visitor, SyntheticTypeVisitor) 3077 ret: T = visitor.visit_raw_expression_type(self)
3080 def serialize(self) -> JsonDict: 3081 assert False, "Synthetic types don't serialize" 3082
3195 def deserialize(cls, data: JsonDict) -> LiteralType: 3196 assert data[".class"] == "LiteralType" 3197 return LiteralType(value=data["value"], fallback=Instance.deserialize(data["fallback"]))
3205 def read(cls, data: Buffer) -> LiteralType: 3206 assert read_tag(data) == INSTANCE 3207 fallback = Instance.read(data)
3305 def deserialize(cls, data: JsonDict) -> UnionType: 3306 assert data[".class"] == "UnionType" 3307 return UnionType(
3371 def accept(self, visitor: TypeVisitor[T]) -> T: 3372 assert isinstance(visitor, SyntheticTypeVisitor) 3373 ret: T = visitor.visit_ellipsis_type(self)
3376 def serialize(self) -> JsonDict: 3377 assert False, "Synthetic types don't serialize" 3378
3453 def deserialize(cls, data: JsonDict) -> Type: 3454 assert data[".class"] == "TypeType" 3455 return TypeType.make_normalized(deserialize_type(data["item"]))
3489 def accept(self, visitor: TypeVisitor[T]) -> T: 3490 assert isinstance(visitor, SyntheticTypeVisitor) 3491 ret: T = visitor.visit_placeholder_type(self)
3504 # during semantic analysis.
3505 assert False, f"Internal error: unresolved placeholder type {self.fullname}"
3506
3636 if t.type.fullname == "builtins.tuple":
3637 assert len(t.args) == 1
3638 s += f"[{self.list_str(t.args)}, ...]"
3880 ) -> None: 3881 assert cache is not None 3882 super().__init__(cache)
3975 return types 3976 assert item is not None 3977 if idx < prefix:
4027 old_index = unpack_index 4028 assert old_index is None 4029 # Don't return so that we can also sanity check there is only one.
4186 return DeletedType.read(data)
4187 assert False, f"Unknown type tag {tag}"
4188
4195 return Overloaded.read(data)
4196 assert False, f"Invalid type tag for FunctionLike {tag}"
4197
4206 return TypeVarTupleType.read(data)
4207 assert False, f"Invalid type tag for TypeVarLikeType {tag}"
4208
65 return True
66 assert target.alias, f"Unfixed type alias {target.type_ref}"
67 return is_invalid_recursive_alias(seen_nodes | {target.alias}, get_proper_type(target))
67 return is_invalid_recursive_alias(seen_nodes | {target.alias}, get_proper_type(target))
68 assert isinstance(target, ProperType)
69 if not isinstance(target, (UnionType, TupleType)):
226 def record_protocol_subtype_check(self, left_type: TypeInfo, right_type: TypeInfo) -> None: 227 assert right_type.is_protocol 228 self._rechecked_types.add(left_type)
294 """ 295 assert self.proto_deps is not None, "This should not be called after failed cache load" 296 new_deps = self._snapshot_protocol_deps()
45 else: 46 assert isinstance(tv, ParamSpecType) 47 tv = ParamSpecType(
70 erased_tuple_type = erase_typevars(typ.tuple_type, {tv.id for tv in typ.defn.type_vars})
71 assert isinstance(erased_tuple_type, ProperType)
72 if isinstance(erased_tuple_type, TupleType):
20 ) -> tuple[tuple[Type, ...], tuple[Type, ...], tuple[Type, ...]]: 21 assert typ.type.type_var_tuple_prefix is not None 22 assert typ.type.type_var_tuple_suffix is not None
21 assert typ.type.type_var_tuple_prefix is not None 22 assert typ.type.type_var_tuple_suffix is not None 23 return split_with_prefix_and_suffix(
262 ) -> str: 263 from xml.sax.saxutils import escape 264
568 """ 569 return hashlib.sha1(data).hexdigest() 570
573 """Reproduce a gray color in ANSI escape sequence""" 574 assert sys.platform != "win32", "curses is not available on Windows" 575 set_color = "".join([cup[:-1].decode(), "m"])
624 # Windows and Emscripten can both use ANSI/VT100 escape sequences for color
625 assert sys.platform in ("win32", "emscripten")
626 self.BOLD = "\033[1m"
662 return True 663 assert False, "Running not on Windows" 664
243 else:
244 assert False, f"Add deprecation message for 'mypy_extensions.{name}'"
245 import warnings
16 import os.path 17 import subprocess 18 import sys
66 env["PYTHONPATH"] = base_path + os.pathsep + env.get("PYTHONPATH", "")
67 cmd = subprocess.run([sys.executable, setup_file, "build_ext", "--inplace"], env=env)
68 sys.exit(cmd.returncode)
288 self_type = op.fn.sig.args[0].type 289 assert isinstance(self_type, RInstance), self_type 290 cl = self_type.class_ir
296 self_type = op.fn.sig.args[0].type 297 assert isinstance(self_type, RInstance), self_type 298 cl = self_type.class_ir
73 ) -> None: 74 assert exits 75 self.succ = succ
93 for block in blocks: 94 assert not any( 95 isinstance(op, ControlOp) for op in block.ops[:-1] 96 ), "Control-flow ops must be at the end of blocks" 97
568 else: 569 assert universe is not None, "Universe must be defined for a must analysis" 570 before[block] = set(universe)
592 new_before &= after[pred] 593 assert new_before is not None 594 else:
262 for src in op.src: 263 assert isinstance(op.dest.type, RArray) 264 self.check_type_coercion(op, src.type, op.dest.type.item_type)
312 313 assert expected_type is not None, "Missed a case for LoadLiteral check" 314
95 self_type = op.fn.sig.args[0].type 96 assert isinstance(self_type, RInstance), self_type 97 cl = self_type.class_ir
152 tree = result.graph[mod].tree 153 assert tree is not None 154 annotations.append(
206
207 h = hashlib.sha1()
208 h.update(",".join(modules).encode())
304 full_module_name = source.module 305 assert source.path 306 if os.path.split(source.path)[1] == "__init__.py":
447 if compiler_options.group_name is not None: 448 assert len(groups) == 1, "If using custom group_name, only one group is expected" 449
186 self._indent -= 4 187 assert self._indent >= 0 188
325 return self.tuple_undefined_value(rtype) 326 assert False, rtype 327
453 else: 454 assert False, "not expecting tuple with error overlap" 455 if isinstance(item_type, RTuple):
594 value_type = optional_value_type(src_type) 595 assert value_type is not None 596 if is_same_type(value_type, typ):
642 else:
643 assert False, f"unexpected primitive type: {typ}"
644 check = "({}_Check({}))"
719 elif isinstance(typ, RTuple): 720 assert not optional 721 self.emit_tuple_cast(src, dest, typ, declare_dest, error, src_type)
722 else: 723 assert False, "Cast not implemented: %s" % typ 724
745 else:
746 assert isinstance(error, ReturnHandler), error
747 self.emit_line("return %s;" % error.value)
874 else: 875 assert isinstance(error, ReturnHandler), error 876 failure = "return %s;" % error.value
909 # Whether we are borrowing or not makes no difference. 910 assert not optional # Not supported for overlapping error values 911 if declare_dest:
917 # Whether we are borrowing or not makes no difference. 918 assert not optional # Not supported for overlapping error values 919 if declare_dest:
925 # Whether we are borrowing or not makes no difference. 926 assert not optional # Not supported for overlapping error values 927 if declare_dest:
933 # Whether we are borrowing or not makes no difference. 934 assert not optional # Not supported for overlapping error values 935 if declare_dest:
940 elif is_float_rprimitive(typ): 941 assert not optional # Not supported for overlapping error values 942 if declare_dest:
997 else: 998 assert False, "Unboxing not implemented: %s" % typ 999
1054 else: 1055 assert not typ.is_unboxed 1056 # Type is boxed -- trivially just assign.
1094 else: 1095 assert False, "emit_gc_visit() not implemented for %s" % repr(rtype) 1096
1118 else: 1119 assert False, "emit_gc_clear() not implemented for %s" % repr(rtype) 1120
1183 if type_str:
1184 assert src
1185 line += f", {type_str}, {src}"
206 """ 207 assert cl.reuse_freed_instance 208
209 # The free list implementation doesn't support class hierarchies 210 assert cl.is_final_class or cl.children == [] 211
320 if generate_full: 321 assert cl.setup is not None 322 emitter.emit_line(native_function_header(cl.setup, emitter) + ";")
322 emitter.emit_line(native_function_header(cl.setup, emitter) + ";") 323 assert cl.ctor is not None 324 emitter.emit_line(native_function_header(cl.ctor, emitter) + ";")
165 terminator = block.terminator 166 assert isinstance(terminator, ControlOp), terminator 167
260 else: 261 assert False, "Invalid branch" 262
314 typ = op.dest.type 315 assert isinstance(typ, RArray), typ 316 dest = self.reg(op.dest)
378 if op.class_type.class_ir.is_trait:
379 assert not decl_cl.is_trait
380 cast = f"({decl_cl.struct_name(self.emitter.names)} *)"
495 # Again, use vtable access for properties... 496 assert not op.is_init and op.error_kind == ERR_FALSE, "%s %d %d %s" % ( 497 op.attr, 498 op.is_init, 499 op.error_kind, 500 rtype, 501 ) 502 version = "_TRAIT" if cl.is_trait else ""
593 rtype = op_obj.type 594 assert isinstance(rtype, RInstance), rtype 595 class_ir = rtype.class_ir
596 method = rtype.class_ir.get_method(name) 597 assert method is not None 598
705 else: 706 assert False, "op value type must be either str or Value" 707 else:
817 # TODO: support tuple type 818 assert isinstance(op.src_type, RStruct), op.src_type 819 assert op.field in op.src_type.names, "Invalid field name."
818 assert isinstance(op.src_type, RStruct), op.src_type 819 assert op.field in op.src_type.names, "Invalid field name." 820 self.emit_line(
841 src_type = op.src.type 842 assert isinstance(src_type, RStruct), src_type 843 init_items = []
938 def emit_attribute_error(self, op: Branch, class_name: str, attr: str) -> None:
939 assert op.traceback_entry is not None
940 globals_static = self.emitter.static_name("globals", self.module_name)
195 ) -> BuildResult: 196 assert options.strict_optional, "strict_optional must be turned on" 197 result = build(
785 """ 786 assert self.group_name is not None 787
63 """ 64 assert not fn.internal 65 return (
311 gen.set_target(fn) 312 assert len(fn.args) in (2, 3), "__ipow__ should only take 2 or 3 arguments" 313 gen.arg_names = ["self", "exp", "mod"]
518 method = cl.get_method(func) 519 assert method is not None 520 generate_wrapper_core(method, emitter, arg_names=["lhs", "rhs"])
611 # way easier to do in IR!)
612 assert is_bool_rprimitive(fn.ret_type), "Only bool return supported for __bool__"
613 emitter.emit_line("return val;")
62 for item in value: 63 assert _is_literal_value(item) 64 self.record_literal(item)
69 for item in value: 70 assert _is_literal_value(item) 71 self.record_literal(item)
73 else: 74 assert False, "invalid literal: %r" % value 75
106 return n + self.frozenset_literals[value] 107 assert False, "invalid literal: %r" % value 108
168 for item in value: 169 assert _is_literal_value(item) 170 index = self.literal_index(item)
129 if unnamed_function(func_name):
130 assert line is not None
131 partial_name = f"{shortname}.{line}"
246 def vtable_entry(self, name: str) -> int:
247 assert self.vtable is not None, "vtable not computed yet"
248 assert name in self.vtable, f"{self.name!r} has no attribute {name!r}"
247 assert self.vtable is not None, "vtable not computed yet"
248 assert name in self.vtable, f"{self.name!r} has no attribute {name!r}"
249 return self.vtable[name]
431 fullname = data["module_name"] + "." + data["name"] 432 assert fullname in ctx.classes, "Class %s not in deser class map" % fullname 433 ir = ctx.classes[fullname]
523 ) 524 assert False, "Bogus vtable .class: %s" % data[".class"] 525
184 def line(self) -> int: 185 assert self._line is not None 186 return self._line
193 def id(self) -> str: 194 assert self.line is not None 195 return get_id_from_name(self.name, self.fullname, self.line)
73 ir = ClassIR(cls["name"], cls["module_name"]) 74 assert ir.fullname not in ctx.classes, "Class %s already in map" % ir.fullname 75 ctx.classes[ir.fullname] = ir
81 func = FuncIR.deserialize(method, ctx) 82 assert func.decl.id not in ctx.functions, ( 83 "Method %s already in map" % func.decl.fullname 84 ) 85 ctx.functions[func.decl.id] = func
113 """The terminator operation of the block.""" 114 assert bool(self.ops) and isinstance(self.ops[-1], ControlOp) 115 return self.ops[-1]
358 super().__init__(dest, line) 359 assert src 360 assert isinstance(dest.type, RArray)
359 assert src 360 assert isinstance(dest.type, RArray) 361 assert dest.type.length == len(src)
360 assert isinstance(dest.type, RArray) 361 assert dest.type.length == len(src) 362 self.src = src
402 def set_target(self, i: int, new: BasicBlock) -> None: 403 assert i == 0 404 self.label = new
412 def set_sources(self, new: list[Value]) -> None: 413 assert not new 414
466 def set_target(self, i: int, new: BasicBlock) -> None: 467 assert i == 0 or i == 1 468 if i == 0:
541 def set_sources(self, new: list[Value]) -> None: 542 assert not new 543
566 super().__init__(line) 567 assert self.error_kind != -1, "error_kind not defined" 568
579 def __init__(self, src: Value, line: int = -1) -> None: 580 assert src.type.is_refcounted 581 super().__init__(line)
604 def __init__(self, src: Value, is_xdec: bool = False, line: int = -1) -> None: 605 assert src.type.is_refcounted 606 super().__init__(line)
632 self.args = list(args) 633 assert len(self.args) == len(fn.sig.args) 634 self.type = fn.sig.ret_type
659 self.args = args 660 assert isinstance(obj.type, RInstance), "Methods can only be called on instances" 661 self.receiver_type = obj.type
662 method_ir = self.receiver_type.class_ir.method_sig(method)
663 assert method_ir is not None, "{} doesn't have method {}".format(
664 self.receiver_type.name, method
665 )
666 ret_type = method_ir.ret_type
730 if is_pure: 731 assert error_kind == ERR_NEVER 732
765 if isinstance(steals, list): 766 assert len(steals) == len(self.args) 767 return [arg for arg, steal in zip(self.args, steals) if steal]
799 def set_sources(self, new: list[Value]) -> None: 800 assert not new 801
835 def set_sources(self, new: list[Value]) -> None: 836 assert not new 837
860 self.allow_error_value = allow_error_value 861 assert isinstance(obj.type, RInstance), "Attribute access not supported: %s" % obj.type 862 self.class_type = obj.type
894 self.src = src 895 assert isinstance(obj.type, RInstance), "Attribute access not supported: %s" % obj.type 896 self.class_type = obj.type
967 def set_sources(self, new: list[Value]) -> None: 968 assert not new 969
1049 self.index = index 1050 assert isinstance(src.type, RTuple), "TupleGet only operates on tuples" 1051 assert index >= 0
1050 assert isinstance(src.type, RTuple), "TupleGet only operates on tuples" 1051 assert index >= 0 1052 self.type = src.type.types[index]
1192 def set_sources(self, new: list[Value]) -> None: 1193 assert not new 1194
1238 if is_pure: 1239 assert error_kind == ERR_NEVER 1240
1248 if isinstance(self.steals, list): 1249 assert len(self.steals) == len(self.args) 1250 return [arg for arg, steal in zip(self.args, self.steals) if steal]
1345 def set_sources(self, new: list[Value]) -> None: 1346 assert not new 1347
1593 # TODO: Support other native integer types 1594 assert is_pointer_rprimitive(src.type) 1595 self.src = src
1680 super().__init__(line) 1681 assert isinstance(src.type, RStruct), src.type 1682 self.type = src.type
1726 if new: 1727 assert isinstance(new[0], Register) 1728 assert len(new) == 1
1727 assert isinstance(new[0], Register) 1728 assert len(new) == 1 1729 self.src = new[0]
1763 def __init__(self, src: list[Value], *, steal: bool = False) -> None: 1764 assert src 1765 self.src = src
1812 super().__init__(line) 1813 assert src.is_borrowed 1814 self.src = src
140 if op.is_init: 141 assert op.error_kind == ERR_NEVER 142 if op.error_kind == ERR_NEVER:
219 else: 220 assert False, "value type must be either str or Value" 221 else:
338 # Register/value 339 assert isinstance(arg, Value) 340 if isinstance(arg, Integer):
357 # Basic block (label)
358 assert isinstance(arg, BasicBlock)
359 result.append("L%s" % arg.label)
361 # RType 362 assert isinstance(arg, RType) 363 result.append(arg.name)
132 else:
133 assert False, f"Can't find class {data}"
134 elif data[".class"] == "RTuple":
266 else: 267 assert False, "Unrecognized ctype: %r" % ctype 268
679 return "F"
680 assert not t.is_unboxed, f"{t} unexpected unboxed type"
681 return "O"
687 def visit_rstruct(self, t: RStruct) -> str: 688 assert False, "RStruct not supported in tuple" 689
690 def visit_rarray(self, t: RArray) -> str: 691 assert False, "RArray not supported in tuple" 692
693 def visit_rvoid(self, t: RVoid) -> str: 694 assert False, "rvoid in tuple?" 695
785 else: 786 assert False, "invalid rtype for computing alignment" 787 max_alignment = max(compute_rtype_alignment(item) for item in items)
807 else: 808 assert False, "invalid rtype for computing size" 809
885 def serialize(self) -> JsonDict: 886 assert False 887
889 def deserialize(cls, data: JsonDict, ctx: DeserMaps) -> RStruct: 890 assert False 891
975 items = flatten_nested_unions(items) 976 assert items 977
1082 def serialize(self) -> JsonDict: 1083 assert False 1084
1086 def deserialize(cls, data: JsonDict, ctx: DeserMaps) -> RArray: 1087 assert False 1088
563 ) -> None: 564 assert isinstance(lvalue, NameExpr), lvalue 565 assert isinstance(lvalue.node, Var), lvalue.node
564 assert isinstance(lvalue, NameExpr), lvalue 565 assert isinstance(lvalue.node, Var), lvalue.node 566 if lvalue.node.final_value is None:
570 name = f"{class_name}.{lvalue.name}"
571 assert name is not None, "Full name not set for variable"
572 coerced = self.coerce(rvalue_reg, type_override or self.node_type(lvalue), lvalue.line)
579 split_name = split_target(self.graph, fullname) 580 assert split_name is not None 581 module, name = split_name
622 else: 623 assert False, "Unsupported literal value" 624
637 # Semantic analyzer doesn't create ad-hoc Vars for special forms. 638 assert lvalue.is_special_form 639 symbol = Var(lvalue.name)
671 else: 672 assert False, lvalue.kind 673 elif isinstance(lvalue, IndexExpr):
699 700 assert False, "Unsupported lvalue: %r" % lvalue 701
714 return reg 715 assert False, target.base.type 716 if isinstance(target, AssignmentTargetAttr):
722 723 assert False, "Unsupported lvalue: %r" % target 724
726 """Read an attribute that might have an error value without raising AttributeError.""" 727 assert isinstance(obj.type, RInstance) and obj.type.class_ir.is_ext_class 728 return self.add(GetAttr(obj, attr, line, allow_error_value=True))
747 ) 748 assert target_reg2 is not None, target.base.type 749 elif isinstance(target, AssignmentTargetTuple):
751 rtypes = rvalue_reg.type.types 752 assert len(rtypes) == len(target.items) 753 for i in range(len(rtypes)):
762 else: 763 assert False, "Unsupported assignment target" 764
981 return RUnion.make_simplified_union(list(rtypes)) 982 assert False, target_type 983
1014 else:
1015 assert False, f"Failed to extract dict base from {target_type}"
1016 return [map_instance_to_supertype(target_type, dict_base)]
1296 """ 1297 assert isinstance(symbol, SymbolNode), symbol 1298 reg = Register(
1311 target = self.symtables[-1][symbol] 1312 assert isinstance(target, AssignmentTargetRegister), target 1313 return target
1365 def is_builtin_ref_expr(self, expr: RefExpr) -> bool:
1366 assert expr.node, "RefExpr not resolved"
1367 return "." in expr.node.fullname and expr.node.fullname.split(".")[0] == "builtins"
1376 if self.is_builtin_ref_expr(expr): 1377 assert expr.node, "RefExpr not resolved" 1378 return self.load_module_attr_by_fullname(expr.node.fullname, expr.line)
1383 ): 1384 assert expr.fullname 1385 return self.load_native_type_object(expr.fullname)
1453 def get_default() -> Value: 1454 assert arg.initializer is not None 1455
1472 1473 assert isinstance(target, AssignmentTargetRegister), target 1474 reg = target.register
1527 else: 1528 assert type_param.kind == PARAM_SPEC_KIND 1529 tvt = builder.py_get_attr(typing_mod, "ParamSpec", line)
1568 else: 1569 assert func_reg is not None 1570 builder.add(SetAttr(func_reg, arg.variable.name, value, arg.line))
387 super().finalize(ir) 388 assert self.type_obj 389 add_dunders_to_non_ext_dict(
429 lvalue = stmt.lvalues[0] 430 assert isinstance(lvalue, NameExpr), lvalue 431 return type_name.node
777 lvalue = stmt.lvalues[0] 778 assert isinstance(lvalue, NameExpr), lvalue 779 if not stmt.is_final_def and not is_constant(stmt.rvalue):
820 func_ir = cls.get_method("__eq__")
821 assert func_ir
822 eq_sig = func_ir.decl.sig
885 decorator = d.accept(builder.visitor) 886 assert isinstance(decorator, Value), decorator 887 dec_class = builder.py_call(decorator, [dec_class], dec_class.line)
896 for lval, rtype in attrs_to_cache: 897 assert isinstance(lval, NameExpr), lval 898 rval = builder.py_get_attr(typ, lval.name, cdef.line)
67 def callable_class(self) -> ImplicitClass: 68 assert self._callable_class is not None 69 return self._callable_class
76 def env_class(self) -> ClassIR: 77 assert self._env_class is not None 78 return self._env_class
85 def generator_class(self) -> GeneratorClass: 86 assert self._generator_class is not None 87 return self._generator_class
94 def curr_env_reg(self) -> Value: 95 assert self._curr_env_reg is not None 96 return self._curr_env_reg
126 def self_reg(self) -> Value: 127 assert self._self_reg is not None 128 return self._self_reg
135 def curr_env_reg(self) -> Value: 136 assert self._curr_env_reg is not None 137 return self._curr_env_reg
144 def prev_env_reg(self) -> Value: 145 assert self._prev_env_reg is not None 146 return self._prev_env_reg
181 def next_label_reg(self) -> Value: 182 assert self._next_label_reg is not None 183 return self._next_label_reg
190 def next_label_target(self) -> AssignmentTarget: 191 assert self._next_label_target is not None 192 return self._next_label_target
139 env = builder.add(GetAttr(base, ENV_ATTR_NAME, builder.fn_info.fitem.line))
140 assert isinstance(env.type, RInstance), f"{env} must be of type RInstance"
141
204 rtype = builder.type_to_rtype(arg.variable.type) 205 assert base is not None, "base cannot be None for adding nonlocal args" 206 builder.add_var_to_env_class(
287 else: 288 assert o.info is not None 289 typ = builder.load_native_type_object(o.info.fullname)
376 # Call a method via the *class* 377 assert isinstance(callee.expr.node, TypeInfo), callee.expr.node 378 ir = builder.mapper.type_to_ir[callee.expr.node]
482 # object.__new__(cls)
483 assert (
484 len(expr.args) == 1
485 ), f"Expected object.__new__() call to have exactly 1 argument, got {len(expr.args)}"
486 typ_arg = expr.args[0]
797 tuple_val = builder.accept(rhs) 798 assert isinstance(tuple_val.type, RTuple) 799 items = [builder.add(TupleGet(tuple_val, i)) for i in range(len(tuple_val.type.types))]
801 if items is not None: 802 assert left is not None 803 n_items = len(items)
166 """ 167 assert is_sequence_rprimitive(expr_reg.type) 168 target_type = builder.get_sequence_type(expr)
444 step = builder.extract_int(expr.args[2]) 445 assert step is not None 446 if step == 0:
791 def init(self, expr_reg: Value, target_type: RType, reverse: bool) -> None: 792 assert is_sequence_rprimitive(expr_reg.type), expr_reg 793 builder = self.builder
851 ) 852 assert value_box 853 # We coerce to the type of list elements here so that
996 # Coerce just in case e.g. key is itself a tuple to be unpacked. 997 assert isinstance(self.target_type, RTuple), self.target_type 998 key = builder.coerce(key, self.target_type.types[0], line)
1140 def init(self, indexes: list[Lvalue], exprs: list[Expression]) -> None: 1141 assert len(indexes) == len(exprs) 1142 # Condition check will require multiple basic blocks, since there will be
105 # Handle regular overload case 106 assert o.impl 107 builder.accept(o.impl)
142 typ = get_proper_type(builder.types[expr]) 143 assert isinstance(typ, CallableType), typ 144
157 func_ir, func_reg = gen_func_item(builder, expr, fname, fsig) 158 assert func_reg is not None 159
276 # create the dispatch function 277 assert isinstance(fitem, FuncDef), fitem 278 return gen_dispatch_func_ir(builder, fitem, fn_info.name, name, sig)
344 fitem = fn_info.fitem 345 assert isinstance(fitem, FuncDef), fitem 346 func_decl = builder.mapper.func_to_decl[fitem]
394 # We populate the optional setter field with none for now. 395 assert name not in class_ir.properties 396 class_ir.properties[name] = (func_ir, None)
399 # The respective property getter must have been processed already 400 assert name in class_ir.properties 401 getter_ir, _ = class_ir.properties[name]
440 func_ir, func_reg = gen_func_item(builder, fdef, name, sig, cdef) 441 assert func_reg is not None 442 builder.functions.append(func_ir)
499 decorator = d.accept(builder.visitor) 500 assert isinstance(decorator, Value), decorator 501 func_reg = builder.py_call(decorator, [func_reg], func_reg.line)
1054 value_reg = builder.add_argument("value", func_decl.sig.args[1].type)
1055 assert name.startswith(PROPSET_PREFIX)
1056 attr_name = name[len(PROPSET_PREFIX) :]
162 mapper = builder.mapper 163 assert isinstance(builder.fn_info.fitem, FuncDef), builder.fn_info.fitem 164 generator_class_ir = mapper.fdef_to_generator[builder.fn_info.fitem]
205 cls = builder.fn_info.generator_class 206 assert cls.exc_regs is not None 207 exc_type, exc_val, exc_tb = cls.exc_regs
278 """Add an op.""" 279 assert not self.blocks[-1].terminated, "Can't add to finished block" 280 self.blocks[-1].ops.append(op)
290 if self.blocks: 291 assert self.blocks[-1].terminated 292
457 def coerce_int_to_fixed_width(self, src: Value, target_type: RType, line: int) -> Value: 458 assert is_fixed_width_rtype(target_type), target_type 459 assert isinstance(target_type, RPrimitive), target_type
458 assert is_fixed_width_rtype(target_type), target_type 459 assert isinstance(target_type, RPrimitive), target_type 460
535 else: 536 assert False, target_type 537
546 # TODO: i32 on 64-bit platform 547 assert False, (src.type, target_type, PLATFORM_SIZE) 548
566 567 assert is_fixed_width_rtype(src_type), src_type 568 assert isinstance(src_type, RPrimitive), src_type
567 assert is_fixed_width_rtype(src_type), src_type 568 assert isinstance(src_type, RPrimitive), src_type 569
584 elif is_int32_rprimitive(src_type): 585 assert PLATFORM_SIZE == 4 586 conv_op = ssize_t_to_int_op
587 else: 588 assert False, src_type 589 x = self.call_c(conv_op, [src], line)
877 if maybe_named and star2_result is None: 878 assert name is not None 879 key = self.load_str(name)
913 self.activate_block(pos_block) 914 assert star_result 915 self.translate_special_method_call(
921 self.activate_block(named_block) 922 assert name is not None 923 key = self.load_str(name)
923 key = self.load_str(name) 924 assert star2_result 925 self.translate_special_method_call(
930 if nullable and maybe_pos and new_seen_empty_reg: 931 assert skip is not out 932 self.activate_block(skip)
939 940 assert not (star_result or star_values) or has_star 941 assert not (star2_result or star2_values) or has_star2
940 assert not (star_result or star_values) or has_star 941 assert not (star2_result or star2_values) or has_star2 942 if has_star:
977 # Otherwise fallback to py_call_with_posargs_op or py_call_with_kwargs_op. 978 assert arg_names is not None 979
982 ) 983 assert pos_args_tuple 984
1181 if arg.kind == ARG_STAR: 1182 assert star_arg 1183 output_arg = star_arg
1247 if arg_kinds is None: 1248 assert arg_names is None, "arg_kinds not present but arg_names is" 1249 arg_kinds = [ARG_POS for _ in arg_values]
1251 else: 1252 assert arg_names is not None, "arg_kinds present but arg_names is not" 1253
1254 # Normalize args to positionals. 1255 assert decl.bound_sig 1256 arg_values = self.native_args_to_positional(
1502 target = self.matching_primitive_op(primitive_ops_candidates, [lreg, rreg], line) 1503 assert target, "Unsupported binary operation: %s" % op 1504 return target
1595 # type cast to pass mypy check 1596 assert isinstance(lhs.type, RTuple) and isinstance(rhs.type, RTuple), (lhs.type, rhs.type) 1597 equal = True if op == "==" else False
1658 else: 1659 assert False, op 1660 return self.add(IntOp(bool_rprimitive, lreg, rreg, code, line))
1672 target = self.matching_primitive_op(primitive_ops_candidates, [value], line) 1673 assert target, "Unsupported unary operation: %s" % op 1674 return target
1981 if desc.ordering is not None: 1982 assert desc.var_arg_type is None 1983 coerced = [coerced[i] for i in desc.ordering]
2055 if matching:
2056 assert matching.priority != desc.priority, "Ambiguous:\n1) {}\n2) {}".format(
2057 matching, desc
2058 )
2059 if desc.priority > matching.priority:
2105 arg = args[i] 2106 assert formal_type is not None # TODO 2107 arg = self.coerce(arg, formal_type, line)
2108 coerced.append(arg) 2109 assert desc.ordering is None 2110 assert desc.var_arg_type is None
2109 assert desc.ordering is None 2110 assert desc.var_arg_type is None 2111 assert not desc.extra_int_constants
2110 assert desc.var_arg_type is None 2111 assert not desc.extra_int_constants 2112 target = self.add(PrimitiveOp(coerced, desc, line=line))
2124 2125 assert desc.truncated_type is None 2126 result = target
2153 if matching:
2154 assert matching.priority != desc.priority, "Ambiguous:\n1) {}\n2) {}".format(
2155 matching, desc
2156 )
2157 if desc.priority > matching.priority:
2290 else: 2291 assert False, type 2292 return self.call_c(prim, [lhs, rhs], line)
2310 else: 2311 assert False, type 2312 return self.call_c(prim, [lhs, rhs], line)
2404 # TODO: Support use_pyssize_t 2405 assert not use_pyssize_t 2406 length = self.gen_method_call(val, "__len__", [], int_rprimitive, line)
2616 value_typ = optional_value_type(lreg.type) 2617 assert value_typ 2618 res = Register(bool_rprimitive)
2644 ) 2645 assert eq is not None 2646 self.add(Assign(res, eq))
2665 ) 2666 assert eq is not None 2667 self.add(Assign(res, eq))
2695 def error(self, msg: str, line: int) -> None: 2696 assert self.errors is not None, "cannot generate errors in this compiler phase" 2697 self.errors.error(msg, self.module_path, line)
78 # Unwrap NewType to its base type for rprimitive mapping 79 assert len(typ.type.bases) == 1, typ.type.bases 80 return self.type_to_rtype(typ.type.bases[0])
151 elif isinstance(typ, PartialType): 152 assert typ.var.type is not None 153 return self.type_to_rtype(typ.var.type)
165 # actually show up, so anything else is a bug somewhere. 166 assert False, "unexpected type %s" % type(typ) 167
153 node = pattern.class_ref.node 154 assert isinstance(node, TypeInfo), node 155 match_args = extract_dunder_match_args_names(node)
345 ty = info.names.get("__match_args__")
346 assert ty
347 match_args_type = get_proper_type(ty.type)
347 match_args_type = get_proper_type(ty.type) 348 assert isinstance(match_args_type, TupleType), match_args_type 349
358 match_arg = proper_item.value
359 assert isinstance(match_arg, str), f"Unrecognized __match_args__ item: {item}"
360
59 def gen_break(self, builder: IRBuilder, line: int) -> None: 60 assert False, "break outside of loop" 61
62 def gen_continue(self, builder: IRBuilder, line: int) -> None: 63 assert False, "continue outside of loop" 64
115 stop_iter_reg = builder.fn_info.generator_class.stop_iter_value_reg 116 assert stop_iter_reg is not None 117
177 # and infers the type as object 178 assert isinstance(self.ret_reg, (Register, AssignmentTarget)), self.ret_reg 179 builder.assign(self.ret_reg, value, line)
143 node = cdef.info.names[attr].node 144 assert node is not None 145 kind = "trait" if base_ir.is_trait else "class"
268 if node.func.is_property:
269 assert node.func.type, f"Expected return type annotation for property '{node.name}'"
270 decl.is_prop_getter = True
447 if isinstance(node.node, Var):
448 assert node.node.type, "Class member %s missing type" % name
449 if not node.node.is_classvar and name not in ("__slots__", "__deletable__"):
174 lvalues = stmt.lvalues 175 assert lvalues 176 builder.disallow_class_assignments(lvalues, stmt.line)
397 # If statements are normalized 398 assert len(stmt.expr) == 1 399
440 def else_block() -> None: 441 assert s.else_body is not None 442 builder.accept(s.else_body)
477 The point of this is to also be able to support with.""" 478 assert handlers, "try needs except" 479
919 if is_native: 920 assert isinstance(mgr_v.type, RInstance), mgr_v.type 921 exit_val = builder.gen_method_call(
928 else: 929 assert exit_ is not None 930 exit_val = builder.py_call(builder.read(exit_), [builder.read(mgr)] + args, line)
1055 1056 assert cls.send_arg_reg is not None 1057 return cls.send_arg_reg
216 if isinstance(op, OverloadedFuncDef): 217 assert op.impl 218 op = op.impl
165 def visit_mypy_file(self, mypyfile: MypyFile) -> None: 166 assert False, "use transform_mypy_file instead" 167
348 def visit_enum_call_expr(self, o: EnumCallExpr) -> Value: 349 assert False, "can't compile analysis-only expressions" 350
351 def visit__promote_expr(self, o: PromoteExpr) -> Value: 352 assert False, "can't compile analysis-only expressions" 353
354 def visit_namedtuple_expr(self, o: NamedTupleExpr) -> Value: 355 assert False, "can't compile analysis-only expressions" 356
357 def visit_newtype_expr(self, o: NewTypeExpr) -> Value: 358 assert False, "can't compile analysis-only expressions" 359
360 def visit_temp_node(self, o: TempNode) -> Value: 361 assert False, "can't compile analysis-only expressions" 362
363 def visit_type_alias_expr(self, o: TypeAliasExpr) -> Value: 364 assert False, "can't compile analysis-only expressions" 365
366 def visit_type_application(self, o: TypeApplication) -> Value: 367 assert False, "can't compile analysis-only expressions" 368
369 def visit_type_var_expr(self, o: TypeVarExpr) -> Value: 370 assert False, "can't compile analysis-only expressions" 371
372 def visit_paramspec_expr(self, o: ParamSpecExpr) -> Value: 373 assert False, "can't compile analysis-only expressions" 374
375 def visit_type_var_tuple_expr(self, o: TypeVarTupleExpr) -> Value: 376 assert False, "can't compile analysis-only expressions" 377
378 def visit_typeddict_expr(self, o: TypedDictExpr) -> Value: 379 assert False, "can't compile analysis-only expressions" 380
381 def visit_reveal_expr(self, o: RevealExpr) -> Value: 382 assert False, "can't compile analysis-only expressions" 383
384 def visit_var(self, o: Var) -> None: 385 assert False, "can't compile Var; should have been handled already?" 386
387 def visit_cast_expr(self, o: CastExpr) -> Value: 388 assert False, "CastExpr should have been handled in CallExpr" 389
390 def visit_assert_type_expr(self, o: AssertTypeExpr) -> Value: 391 assert False, "AssertTypeExpr should have been handled in CallExpr" 392
393 def visit_star_expr(self, o: StarExpr) -> Value: 394 assert False, "should have been handled in Tuple/List/Set/DictExpr or CallExpr" 395
30 if cls.base: 31 assert cls.base.vtable is not None 32 cls.vtable.update(cls.base.vtable)
63 orig_parent_method = entry.cls.get_method(entry.name, prefer_method=True) 64 assert orig_parent_method 65 method_cls = cls.get_method_and_class(entry.name, prefer_method=True)
24 value = args[2] 25 assert isinstance(index_value, Integer), index_value 26 index = index_value.numeric_value()
19 def wrapper(f: LF) -> LF: 20 assert name not in lowering_registry 21 lowering_registry[name] = f
218 """ 219 assert c_function_name is not None or primitive_name is not None 220 assert not (c_function_name is not None and primitive_name is not None)
219 assert c_function_name is not None or primitive_name is not None 220 assert not (c_function_name is not None and primitive_name is not None) 221 if extra_int_constants is None:
360 def load_address_op(name: str, type: RType, src: str) -> LoadAddressDescription: 361 assert name not in builtin_names, "already defined: %s" % name 362 builtin_names[name] = (type, src)
64 else: 65 assert False, "No recognized _AnalysisName suffix in test case" 66
22 if name.startswith("CPy"):
23 assert re.search(
24 rf"\b{name}\b", header
25 ), f'"{name}" is used in mypyc.primitives but not declared in CPy.h'
26
11 import re 12 import subprocess 13 import sys
36 m = re.search(r"# *cmd: *(.*)", text) 37 assert m is not None, 'Test case missing "# cmd: <files>" section' 38 args = m.group(1).split()
51 # Compile program 52 cmd = subprocess.run( 53 [sys.executable, "-m", "mypyc", *args], 54 stdout=subprocess.PIPE, 55 stderr=subprocess.STDOUT, 56 cwd="tmp", 57 env=env, 58 ) 59 if "ErrorOutput" in testcase.name or cmd.returncode != 0:
68 # Run main program 69 out += subprocess.check_output([python3_path, program], cwd="tmp") 70 finally:
35 def test_label(self) -> None: 36 assert self.emitter.label(BasicBlock(4)) == "CPyL4" 37
40 emitter = Emitter(self.context, names) 41 assert emitter.reg(self.n) == "cpy_r_n" 42
43 def test_object_annotation(self) -> None:
44 assert self.emitter.object_annotation("hello, world", "line;") == " /* 'hello, world' */"
45 assert (
44 assert self.emitter.object_annotation("hello, world", "line;") == " /* 'hello, world' */"
45 assert (
46 self.emitter.object_annotation(list(range(30)), "line;")
47 == """\
48 /* [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22,
49 23, 24, 25, 26, 27, 28, 29] */"""
50 )
51
57 emitter.emit_line("}")
58 assert emitter.fragments == ["line;\n", "a {\n", " f();\n", "}\n"]
59 emitter = Emitter(self.context, {})
61 emitter.emit_line("CPyStatics[1];", ann=list(range(30)))
62 assert emitter.fragments[0] == "CPyStatics[0]; /* 'hello, world' */\n"
63 assert (
62 assert emitter.fragments[0] == "CPyStatics[0]; /* 'hello, world' */\n" 63 assert ( 64 emitter.fragments[1] 65 == """\ 66 CPyStatics[1]; /* [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 67 21, 22, 23, 24, 25, 26, 27, 28, 29] */\n""" 68 ) 69
71 emitter = self.emitter 72 assert emitter.c_undefined_value(int_rprimitive) == "CPY_INT_TAG" 73 assert emitter.c_undefined_value(str_rprimitive) == "NULL"
72 assert emitter.c_undefined_value(int_rprimitive) == "CPY_INT_TAG" 73 assert emitter.c_undefined_value(str_rprimitive) == "NULL" 74 assert emitter.c_undefined_value(bool_rprimitive) == "2"
73 assert emitter.c_undefined_value(str_rprimitive) == "NULL" 74 assert emitter.c_undefined_value(bool_rprimitive) == "2" 75
77 emitter = self.emitter
78 assert (
79 emitter.c_undefined_value(RTuple([str_rprimitive, int_rprimitive, bool_rprimitive]))
80 == "(tuple_T3OIC) { NULL, CPY_INT_TAG, 2 }"
81 )
82 assert emitter.c_undefined_value(RTuple([str_rprimitive])) == "(tuple_T1O) { NULL }"
81 )
82 assert emitter.c_undefined_value(RTuple([str_rprimitive])) == "(tuple_T1O) { NULL }"
83 assert (
82 assert emitter.c_undefined_value(RTuple([str_rprimitive])) == "(tuple_T1O) { NULL }"
83 assert (
84 emitter.c_undefined_value(RTuple([RTuple([str_rprimitive]), bool_rprimitive]))
85 == "(tuple_T2T1OC) { { NULL }, 2 }"
86 )
87
168 def assert_output(self, expected: str) -> None: 169 assert "".join(self.emitter.fragments) == expected 170 self.emitter.fragments = []
14 # __delitem__ and reverse methods should come last. 15 assert s == [ 16 "__add__", 17 "__rshift__", 18 "__setitem__", 19 "__delitem__", 20 "__radd__", 21 "__rrshift__", 22 ] 23
28 # This should never be `setup`, as it will conflict with the class `setup` 29 assert setter_name(cls, "up", generator) == "testing___SomeClass_set_up" 30
34 35 assert getter_name(cls, "down", generator) == "testing___SomeClass_get_down"
169 def test_int_neg(self) -> None: 170 assert int_neg_op.c_function_name is not None 171 self.assert_emit(
916 actual_lines = [line.strip(" ") for line in frags]
917 assert all(line.endswith("\n") for line in actual_lines)
918 actual_lines = [line.rstrip("\n") for line in actual_lines]
927 if skip_next: 928 assert visitor.op_index == 1 929 else:
929 else: 930 assert visitor.op_index == 0 931
944 # This only supports primitives that map to C calls 945 assert desc.c_function_name is not None 946 self.assert_emit(
959 else: 960 assert False, "Could not find matching op" 961
5 import os 6 import subprocess 7 import sys
29 with tempfile.TemporaryDirectory() as tmpdir:
30 status = subprocess.check_call(
31 [
32 sys.executable,
33 "setup.py",
34 "build_ext",
35 f"--build-lib={tmpdir}",
36 f"--build-temp={tmpdir}",
37 "--run-capi-tests",
38 ],
39 env=env,
40 cwd=os.path.join(base_dir, "mypyc", "lib-rt"),
41 )
42 # Run C unit tests.
45 env["GTEST_COLOR"] = "yes" # Use fancy colors 46 status = subprocess.call( 47 [sys.executable, "-c", "import sys, test_capi; sys.exit(test_capi.run_tests())"], 48 env=env, 49 cwd=tmpdir, 50 ) 51 if status != 0:
35 errors = check_func_ir(fn) 36 assert errors == [error] 37
39 def assert_no_errors(fn: FuncIR) -> None: 40 assert not check_func_ir(fn) 41
154 for src, dest in valid_cases: 155 assert can_coerce_to(src, dest) 156 for src, dest in invalid_cases:
156 for src, dest in invalid_cases: 157 assert not can_coerce_to(src, dest) 158
177 formatted = format_func(fn, errors) 178 assert formatted == [ 179 "def func_1():", 180 "L0:", 181 " goto L1", 182 " ERR: Invalid control operation target: 1", 183 ] 184
17 def test_format_str_literal(self) -> None:
18 assert format_str_literal("") == b"\x00"
19 assert format_str_literal("xyz") == b"\x03xyz"
18 assert format_str_literal("") == b"\x00"
19 assert format_str_literal("xyz") == b"\x03xyz"
20 assert format_str_literal("x" * 127) == b"\x7f" + b"x" * 127
19 assert format_str_literal("xyz") == b"\x03xyz"
20 assert format_str_literal("x" * 127) == b"\x7f" + b"x" * 127
21 assert format_str_literal("x" * 128) == b"\x81\x00" + b"x" * 128
20 assert format_str_literal("x" * 127) == b"\x7f" + b"x" * 127
21 assert format_str_literal("x" * 128) == b"\x81\x00" + b"x" * 128
22 assert format_str_literal("x" * 131) == b"\x81\x03" + b"x" * 131
21 assert format_str_literal("x" * 128) == b"\x81\x00" + b"x" * 128
22 assert format_str_literal("x" * 131) == b"\x81\x03" + b"x" * 131
23
24 def test_encode_str_values(self) -> None:
25 assert _encode_str_values({}) == [b""]
26 assert _encode_str_values({"foo": 0}) == [b"\x01\x03foo", b""]
25 assert _encode_str_values({}) == [b""]
26 assert _encode_str_values({"foo": 0}) == [b"\x01\x03foo", b""]
27 assert _encode_str_values({"foo": 0, "b": 1}) == [b"\x02\x03foo\x01b", b""]
26 assert _encode_str_values({"foo": 0}) == [b"\x01\x03foo", b""]
27 assert _encode_str_values({"foo": 0, "b": 1}) == [b"\x02\x03foo\x01b", b""]
28 assert _encode_str_values({"foo": 0, "x" * 70: 1}) == [
27 assert _encode_str_values({"foo": 0, "b": 1}) == [b"\x02\x03foo\x01b", b""]
28 assert _encode_str_values({"foo": 0, "x" * 70: 1}) == [
29 b"\x01\x03foo",
30 bytes([1, 70]) + b"x" * 70,
31 b"",
32 ]
33 assert _encode_str_values({"y" * 100: 0}) == [bytes([1, 100]) + b"y" * 100, b""]
32 ]
33 assert _encode_str_values({"y" * 100: 0}) == [bytes([1, 100]) + b"y" * 100, b""]
34
35 def test_encode_bytes_values(self) -> None:
36 assert _encode_bytes_values({}) == [b""]
37 assert _encode_bytes_values({b"foo": 0}) == [b"\x01\x03foo", b""]
36 assert _encode_bytes_values({}) == [b""]
37 assert _encode_bytes_values({b"foo": 0}) == [b"\x01\x03foo", b""]
38 assert _encode_bytes_values({b"foo": 0, b"b": 1}) == [b"\x02\x03foo\x01b", b""]
37 assert _encode_bytes_values({b"foo": 0}) == [b"\x01\x03foo", b""]
38 assert _encode_bytes_values({b"foo": 0, b"b": 1}) == [b"\x02\x03foo\x01b", b""]
39 assert _encode_bytes_values({b"foo": 0, b"x" * 70: 1}) == [
38 assert _encode_bytes_values({b"foo": 0, b"b": 1}) == [b"\x02\x03foo\x01b", b""]
39 assert _encode_bytes_values({b"foo": 0, b"x" * 70: 1}) == [
40 b"\x01\x03foo",
41 bytes([1, 70]) + b"x" * 70,
42 b"",
43 ]
44 assert _encode_bytes_values({b"y" * 100: 0}) == [bytes([1, 100]) + b"y" * 100, b""]
43 ]
44 assert _encode_bytes_values({b"y" * 100: 0}) == [bytes([1, 100]) + b"y" * 100, b""]
45
46 def test_encode_int_values(self) -> None:
47 assert _encode_int_values({}) == [b""]
48 assert _encode_int_values({123: 0}) == [b"\x01123", b""]
47 assert _encode_int_values({}) == [b""]
48 assert _encode_int_values({123: 0}) == [b"\x01123", b""]
49 assert _encode_int_values({123: 0, 9: 1}) == [b"\x02123\x009", b""]
48 assert _encode_int_values({123: 0}) == [b"\x01123", b""]
49 assert _encode_int_values({123: 0, 9: 1}) == [b"\x02123\x009", b""]
50 assert _encode_int_values({123: 0, 45: 1, 5 * 10**70: 2}) == [
49 assert _encode_int_values({123: 0, 9: 1}) == [b"\x02123\x009", b""]
50 assert _encode_int_values({123: 0, 45: 1, 5 * 10**70: 2}) == [
51 b"\x02123\x0045",
52 b"\x015" + b"0" * 70,
53 b"",
54 ]
55 assert _encode_int_values({6 * 10**100: 0}) == [b"\x016" + b"0" * 100, b""]
54 ]
55 assert _encode_int_values({6 * 10**100: 0}) == [b"\x016" + b"0" * 100, b""]
56
63 lit.record_literal(False) 64 assert lit.literal_index(None) == 0 65 assert lit.literal_index(False) == 1
64 assert lit.literal_index(None) == 0 65 assert lit.literal_index(False) == 1 66 assert lit.literal_index(True) == 2
65 assert lit.literal_index(False) == 1
66 assert lit.literal_index(True) == 2
67 assert lit.literal_index("y") == 3
66 assert lit.literal_index(True) == 2
67 assert lit.literal_index("y") == 3
68 assert lit.literal_index(1) == 4
67 assert lit.literal_index("y") == 3
68 assert lit.literal_index(1) == 4
69
74 lit.record_literal(()) 75 assert lit.literal_index((b"a", "b")) == 7 76 assert lit.literal_index((1, "y", None, (b"a", "b"))) == 8
75 assert lit.literal_index((b"a", "b")) == 7 76 assert lit.literal_index((1, "y", None, (b"a", "b"))) == 8 77 assert lit.literal_index(()) == 9
76 assert lit.literal_index((1, "y", None, (b"a", "b"))) == 8 77 assert lit.literal_index(()) == 9 78 print(lit.encoded_tuple_values())
78 print(lit.encoded_tuple_values()) 79 assert lit.encoded_tuple_values() == [ 80 "3", # Number of tuples 81 "2", 82 "5", 83 "4", # First tuple (length=2) 84 "4", 85 "6", 86 "3", 87 "0", 88 "7", # Second tuple (length=4) 89 "0", # Third tuple (length=0) 90 ]
19 code = format_blocks([block], names, {})
20 assert code[:-1] == ["L0:", " r0 = 'foo'", " CPyDebug_PrintObject(r0)"]
14 def test_candidate_suffixes(self) -> None:
15 assert candidate_suffixes("foo") == ["", "foo."]
16 assert candidate_suffixes("foo.bar") == ["", "bar.", "foo.bar."]
15 assert candidate_suffixes("foo") == ["", "foo."]
16 assert candidate_suffixes("foo.bar") == ["", "bar.", "foo.bar."]
17
18 def test_exported_name(self) -> None:
19 assert exported_name("foo") == "foo"
20 assert exported_name("foo.bar") == "foo___bar"
19 assert exported_name("foo") == "foo"
20 assert exported_name("foo.bar") == "foo___bar"
21
22 def test_make_module_translation_map(self) -> None:
23 assert make_module_translation_map(["foo", "bar"]) == {"foo": "foo.", "bar": "bar."}
24 assert make_module_translation_map(["foo.bar", "foo.baz"]) == {
23 assert make_module_translation_map(["foo", "bar"]) == {"foo": "foo.", "bar": "bar."}
24 assert make_module_translation_map(["foo.bar", "foo.baz"]) == {
25 "foo.bar": "bar.",
26 "foo.baz": "baz.",
27 }
28 assert make_module_translation_map(["zar", "foo.bar", "foo.baz"]) == {
27 }
28 assert make_module_translation_map(["zar", "foo.bar", "foo.baz"]) == {
29 "foo.bar": "bar.",
30 "foo.baz": "baz.",
31 "zar": "zar.",
32 }
33 assert make_module_translation_map(["foo.bar", "fu.bar", "foo.baz"]) == {
32 }
33 assert make_module_translation_map(["foo.bar", "fu.bar", "foo.baz"]) == {
34 "foo.bar": "foo.bar.",
35 "fu.bar": "fu.bar.",
36 "foo.baz": "baz.",
37 }
38 assert make_module_translation_map(["foo", "foo.foo", "bar.foo", "bar.foo.bar.foo"]) == {
37 }
38 assert make_module_translation_map(["foo", "foo.foo", "bar.foo", "bar.foo.bar.foo"]) == {
39 "foo": "foo.",
40 "foo.foo": "foo.foo.",
41 "bar.foo": "bar.foo.",
42 "bar.foo.bar.foo": "foo.bar.foo.",
43 }
44
46 g = NameGenerator([["foo", "foo.zar"]])
47 assert g.private_name("foo", "f") == "foo___f"
48 assert g.private_name("foo", "C.x.y") == "foo___C___x___y"
47 assert g.private_name("foo", "f") == "foo___f"
48 assert g.private_name("foo", "C.x.y") == "foo___C___x___y"
49 assert g.private_name("foo", "C.x.y") == "foo___C___x___y"
48 assert g.private_name("foo", "C.x.y") == "foo___C___x___y"
49 assert g.private_name("foo", "C.x.y") == "foo___C___x___y"
50 assert g.private_name("foo.zar", "C.x.y") == "zar___C___x___y"
49 assert g.private_name("foo", "C.x.y") == "foo___C___x___y"
50 assert g.private_name("foo.zar", "C.x.y") == "zar___C___x___y"
51 assert g.private_name("foo", "C.x_y") == "foo___C___x_y"
50 assert g.private_name("foo.zar", "C.x.y") == "zar___C___x___y"
51 assert g.private_name("foo", "C.x_y") == "foo___C___x_y"
52 assert g.private_name("foo", "C_x_y") == "foo___C_x_y"
51 assert g.private_name("foo", "C.x_y") == "foo___C___x_y"
52 assert g.private_name("foo", "C_x_y") == "foo___C_x_y"
53 assert g.private_name("foo", "C_x_y") == "foo___C_x_y"
52 assert g.private_name("foo", "C_x_y") == "foo___C_x_y"
53 assert g.private_name("foo", "C_x_y") == "foo___C_x_y"
54 assert g.private_name("foo", "___") == "foo______3_"
53 assert g.private_name("foo", "C_x_y") == "foo___C_x_y"
54 assert g.private_name("foo", "___") == "foo______3_"
55
56 g = NameGenerator([["foo.zar"]])
57 assert g.private_name("foo.zar", "f") == "f"
58
60 g = NameGenerator([["foo", "foo.zar"]], separate=True)
61 assert g.private_name("foo", "f") == "foo___f"
62 assert g.private_name("foo", "C.x.y") == "foo___C___x___y"
61 assert g.private_name("foo", "f") == "foo___f"
62 assert g.private_name("foo", "C.x.y") == "foo___C___x___y"
63 assert g.private_name("foo.zar", "C.x.y") == "foo___zar___C___x___y"
62 assert g.private_name("foo", "C.x.y") == "foo___C___x___y"
63 assert g.private_name("foo.zar", "C.x.y") == "foo___zar___C___x___y"
64 assert g.private_name("foo", "C.x_y") == "foo___C___x_y"
63 assert g.private_name("foo.zar", "C.x.y") == "foo___zar___C___x___y"
64 assert g.private_name("foo", "C.x_y") == "foo___C___x_y"
65 assert g.private_name("foo", "___") == "foo______3_"
64 assert g.private_name("foo", "C.x_y") == "foo___C___x_y"
65 assert g.private_name("foo", "___") == "foo______3_"
66
67 g = NameGenerator([["foo.zar"]], separate=True)
68 assert g.private_name("foo.zar", "f") == "foo___zar___f"
21 def test_empty(self) -> None:
22 assert generate_names_for_ir([], []) == {}
23
25 reg = register("foo")
26 assert generate_names_for_ir([reg], []) == {reg: "foo"}
27
33 block = make_block([op1, op2, Unreachable()])
34 assert generate_names_for_ir([], [block]) == {op1: "r0", op2: "r1"}
35
41 block = make_block([op1, op2])
42 assert generate_names_for_ir([reg], [block]) == {reg: "foo"}
19 a = RArray(int_rprimitive, 10) 20 assert a.item_type == int_rprimitive 21 assert a.length == 10
20 assert a.item_type == int_rprimitive 21 assert a.length == 10 22
24 a = RArray(int_rprimitive, 10) 25 assert str(a) == "int[10]" 26 assert repr(a) == "<RArray <RPrimitive builtins.int>[10]>"
25 assert str(a) == "int[10]" 26 assert repr(a) == "<RArray <RPrimitive builtins.int>[10]>" 27
29 a = RArray(int_rprimitive, 10) 30 assert a == RArray(int_rprimitive, 10) 31 assert a != RArray(bool_rprimitive, 10)
30 assert a == RArray(int_rprimitive, 10) 31 assert a != RArray(bool_rprimitive, 10) 32 assert a != RArray(int_rprimitive, 9)
31 assert a != RArray(bool_rprimitive, 10) 32 assert a != RArray(int_rprimitive, 9) 33
34 def test_hash(self) -> None: 35 assert hash(RArray(int_rprimitive, 10)) == hash(RArray(int_rprimitive, 10)) 36 assert hash(RArray(bool_rprimitive, 5)) == hash(RArray(bool_rprimitive, 5))
35 assert hash(RArray(int_rprimitive, 10)) == hash(RArray(int_rprimitive, 10)) 36 assert hash(RArray(bool_rprimitive, 5)) == hash(RArray(bool_rprimitive, 5)) 37
39 a = RArray(int_rprimitive, 10) 40 assert compute_rtype_alignment(a) == PLATFORM_SIZE 41 b = RArray(bool_rprimitive, 55)
41 b = RArray(bool_rprimitive, 55) 42 assert compute_rtype_alignment(b) == 1 43
45 a = RArray(int_rprimitive, 9) 46 assert compute_rtype_size(a) == 9 * PLATFORM_SIZE 47 b = RArray(bool_rprimitive, 3)
47 b = RArray(bool_rprimitive, 3) 48 assert compute_rtype_size(b) == 3
10 import shutil 11 import subprocess 12 import sys
115 with open(script_name, "rb") as f: 116 exec(f.read(), g) 117 finally:
262 errors.flush_errors() 263 assert False, "Compile error" 264 except CompileError as e:
266 print(fix_native_line_number(line, testcase.file, testcase.line)) 267 assert False, "Compile error" 268
288 copy_output_files(mypyc_output_dir) 289 assert False, "Compilation failed" 290
292 suffix = "pyd" if sys.platform == "win32" else "so"
293 assert glob.glob(f"native.*.{suffix}") or glob.glob(f"native.{suffix}")
294
307 if debugger == "lldb": 308 subprocess.check_call(["lldb", "--", sys.executable, driver_path], env=env) 309 elif debugger == "gdb":
307 if debugger == "lldb": 308 subprocess.check_call(["lldb", "--", sys.executable, driver_path], env=env) 309 elif debugger == "gdb":
309 elif debugger == "gdb": 310 subprocess.check_call(["gdb", "--args", sys.executable, driver_path], env=env) 311 else:
309 elif debugger == "gdb": 310 subprocess.check_call(["gdb", "--args", sys.executable, driver_path], env=env) 311 else:
311 else: 312 assert False, "Unsupported debugger" 313 # TODO: find a way to automatically disable capturing
314 # stdin/stdout when in debugging mode 315 assert False, ( 316 "Test can't pass in debugging mode. " 317 "(Make sure to pass -s to pytest to interact with the debugger)" 318 ) 319 proc = subprocess.Popen(
318 ) 319 proc = subprocess.Popen( 320 [sys.executable, driver_path], 321 stdout=subprocess.PIPE, 322 stderr=subprocess.STDOUT, 323 env=env, 324 ) 325 if sys.version_info >= (3, 12):
386 387 assert proc.returncode == 0 388
49
50 assert type(x) is type(y), (f"Type mismatch at {trail}", type(x), type(y))
51 if isinstance(x, (FuncDecl, FuncIR, ClassIR)):
51 if isinstance(x, (FuncDecl, FuncIR, ClassIR)):
52 assert x.fullname == y.fullname, f"Name mismatch at {trail}"
53 elif isinstance(x, dict):
53 elif isinstance(x, dict):
54 assert len(x.keys()) == len(y.keys()), f"Keys mismatch at {trail}"
55 for (xk, xv), (yk, yv) in zip(x.items(), y.items()):
58 elif isinstance(x, dict):
59 assert x.keys() == y.keys(), f"Keys mismatch at {trail}"
60 for k in x.keys():
68 elif isinstance(x, RType):
69 assert is_same_type(x, y), f"RType mismatch at {trail}"
70 elif isinstance(x, FuncSignature):
70 elif isinstance(x, FuncSignature):
71 assert is_same_signature(x, y), f"Signature mismatch at {trail}"
72 else:
72 else:
73 assert x == y, f"Value mismatch at {trail}"
74
82 """ 83 assert ir1.fullname == ir2.fullname 84
84 85 assert ir1.imports == ir2.imports 86
104 irs2 = deserialize_modules(serialized, ctx) 105 assert irs.keys() == irs2.keys() 106
19 r = RStruct("", [], [bool_rprimitive, int32_rprimitive, int64_rprimitive])
20 assert r.size == 16
21 assert r.offsets == [0, 4, 8]
20 assert r.size == 16 21 assert r.offsets == [0, 4, 8] 22
24 r1 = RStruct("", [], [bool_rprimitive, bool_rprimitive])
25 assert r1.size == 2
26 assert r1.offsets == [0, 1]
25 assert r1.size == 2
26 assert r1.offsets == [0, 1]
27 r2 = RStruct("", [], [int32_rprimitive, bool_rprimitive])
28 r3 = RStruct("", [], [int64_rprimitive, bool_rprimitive])
29 assert r2.offsets == [0, 4]
30 assert r3.offsets == [0, 8]
29 assert r2.offsets == [0, 4] 30 assert r3.offsets == [0, 8] 31 assert r2.size == 8
30 assert r3.offsets == [0, 8] 31 assert r2.size == 8 32 assert r3.size == 16
31 assert r2.size == 8 32 assert r3.size == 16 33
34 r4 = RStruct("", [], [bool_rprimitive, bool_rprimitive, bool_rprimitive, int32_rprimitive])
35 assert r4.size == 8
36 assert r4.offsets == [0, 1, 2, 4]
35 assert r4.size == 8 36 assert r4.offsets == [0, 1, 2, 4] 37
39 r5 = RStruct("", [], [bool_rprimitive, r])
40 assert r5.offsets == [0, 8]
41 assert r5.size == 24
40 assert r5.offsets == [0, 8]
41 assert r5.size == 24
42 r6 = RStruct("", [], [int32_rprimitive, r5])
42 r6 = RStruct("", [], [int32_rprimitive, r5])
43 assert r6.offsets == [0, 8]
44 assert r6.size == 32
43 assert r6.offsets == [0, 8] 44 assert r6.size == 32 45 # test nested struct with alignment less than 8
46 r7 = RStruct("", [], [bool_rprimitive, r4])
47 assert r7.offsets == [0, 4]
48 assert r7.size == 12
47 assert r7.offsets == [0, 4] 48 assert r7.size == 12 49
51 r = RStruct("Foo", ["a", "b"], [bool_rprimitive, object_rprimitive])
52 assert str(r) == "Foo{a:bool, b:object}"
53 assert (
52 assert str(r) == "Foo{a:bool, b:object}"
53 assert (
54 repr(r) == "<RStruct Foo{a:<RPrimitive builtins.bool>, "
55 "b:<RPrimitive builtins.object>}>"
56 )
57 r1 = RStruct("Bar", ["c"], [int32_rprimitive])
57 r1 = RStruct("Bar", ["c"], [int32_rprimitive])
58 assert str(r1) == "Bar{c:i32}"
59 assert repr(r1) == "<RStruct Bar{c:<RPrimitive i32>}>"
58 assert str(r1) == "Bar{c:i32}"
59 assert repr(r1) == "<RStruct Bar{c:<RPrimitive i32>}>"
60 r2 = RStruct("Baz", [], [])
60 r2 = RStruct("Baz", [], [])
61 assert str(r2) == "Baz{}"
62 assert repr(r2) == "<RStruct Baz{}>"
61 assert str(r2) == "Baz{}"
62 assert repr(r2) == "<RStruct Baz{}>"
63
84 85 assert is_runtime_subtype(r1, r) is True 86 assert is_runtime_subtype(r2, r) is False
85 assert is_runtime_subtype(r1, r) is True 86 assert is_runtime_subtype(r2, r) is False 87 assert is_runtime_subtype(r3, r) is False
86 assert is_runtime_subtype(r2, r) is False 87 assert is_runtime_subtype(r3, r) is False 88 assert is_runtime_subtype(r4, r) is False
87 assert is_runtime_subtype(r3, r) is False 88 assert is_runtime_subtype(r4, r) is False 89 assert is_runtime_subtype(r5, r) is False
88 assert is_runtime_subtype(r4, r) is False 89 assert is_runtime_subtype(r5, r) is False 90
95 r1 = RStruct("Foo", ["a", "b"], [bool_rprimitive, int_rprimitive])
96 assert hash(r) == hash(r1)
97 assert r == r1
96 assert hash(r) == hash(r1) 97 assert r == r1 98
100 r2 = RStruct("Foq", ["a", "b"], [bool_rprimitive, int_rprimitive])
101 assert hash(r) != hash(r2)
102 assert r != r2
101 assert hash(r) != hash(r2) 102 assert r != r2 103
105 r3 = RStruct("Foo", ["a", "c"], [bool_rprimitive, int_rprimitive])
106 assert hash(r) != hash(r3)
107 assert r != r3
106 assert hash(r) != hash(r3) 107 assert r != r3 108
110 r4 = RStruct("Foo", ["a", "b"], [bool_rprimitive, int_rprimitive, bool_rprimitive])
111 assert hash(r) != hash(r4)
112 assert r != r4
111 assert hash(r) != hash(r4) 112 assert r != r4
22 def test_names(self) -> None: 23 assert RTuple([int_rprimitive, int_rprimitive]).unique_id == "T2II" 24 assert RTuple([list_rprimitive, object_rprimitive, self.inst_a]).unique_id == "T3OOO"
23 assert RTuple([int_rprimitive, int_rprimitive]).unique_id == "T2II" 24 assert RTuple([list_rprimitive, object_rprimitive, self.inst_a]).unique_id == "T3OOO" 25 assert RTuple([list_rprimitive, object_rprimitive, self.inst_b]).unique_id == "T3OOO"
24 assert RTuple([list_rprimitive, object_rprimitive, self.inst_a]).unique_id == "T3OOO" 25 assert RTuple([list_rprimitive, object_rprimitive, self.inst_b]).unique_id == "T3OOO" 26 assert RTuple([]).unique_id == "T0"
25 assert RTuple([list_rprimitive, object_rprimitive, self.inst_b]).unique_id == "T3OOO" 26 assert RTuple([]).unique_id == "T0" 27 assert (
26 assert RTuple([]).unique_id == "T0" 27 assert ( 28 RTuple([RTuple([]), RTuple([int_rprimitive, int_rprimitive])]).unique_id == "T2T0T2II" 29 ) 30 assert (
29 ) 30 assert ( 31 RTuple([bool_rprimitive, RUnion([bool_rprimitive, int_rprimitive])]).unique_id 32 == "T2CO" 33 )
26 def test_bit(self) -> None: 27 assert is_subtype(bit_rprimitive, bool_rprimitive) 28 assert is_subtype(bit_rprimitive, int_rprimitive)
27 assert is_subtype(bit_rprimitive, bool_rprimitive) 28 assert is_subtype(bit_rprimitive, int_rprimitive) 29 assert is_subtype(bit_rprimitive, short_int_rprimitive)
28 assert is_subtype(bit_rprimitive, int_rprimitive) 29 assert is_subtype(bit_rprimitive, short_int_rprimitive) 30 for rt in native_int_types:
30 for rt in native_int_types: 31 assert is_subtype(bit_rprimitive, rt) 32
33 def test_bool(self) -> None: 34 assert not is_subtype(bool_rprimitive, bit_rprimitive) 35 assert is_subtype(bool_rprimitive, int_rprimitive)
34 assert not is_subtype(bool_rprimitive, bit_rprimitive) 35 assert is_subtype(bool_rprimitive, int_rprimitive) 36 assert is_subtype(bool_rprimitive, short_int_rprimitive)
35 assert is_subtype(bool_rprimitive, int_rprimitive) 36 assert is_subtype(bool_rprimitive, short_int_rprimitive) 37 for rt in native_int_types:
37 for rt in native_int_types: 38 assert is_subtype(bool_rprimitive, rt) 39
40 def test_int64(self) -> None: 41 assert is_subtype(int64_rprimitive, int64_rprimitive) 42 assert is_subtype(int64_rprimitive, int_rprimitive)
41 assert is_subtype(int64_rprimitive, int64_rprimitive) 42 assert is_subtype(int64_rprimitive, int_rprimitive) 43 assert not is_subtype(int64_rprimitive, short_int_rprimitive)
42 assert is_subtype(int64_rprimitive, int_rprimitive) 43 assert not is_subtype(int64_rprimitive, short_int_rprimitive) 44 assert not is_subtype(int64_rprimitive, int32_rprimitive)
43 assert not is_subtype(int64_rprimitive, short_int_rprimitive) 44 assert not is_subtype(int64_rprimitive, int32_rprimitive) 45 assert not is_subtype(int64_rprimitive, int16_rprimitive)
44 assert not is_subtype(int64_rprimitive, int32_rprimitive) 45 assert not is_subtype(int64_rprimitive, int16_rprimitive) 46
47 def test_int32(self) -> None: 48 assert is_subtype(int32_rprimitive, int32_rprimitive) 49 assert is_subtype(int32_rprimitive, int_rprimitive)
48 assert is_subtype(int32_rprimitive, int32_rprimitive) 49 assert is_subtype(int32_rprimitive, int_rprimitive) 50 assert not is_subtype(int32_rprimitive, short_int_rprimitive)
49 assert is_subtype(int32_rprimitive, int_rprimitive) 50 assert not is_subtype(int32_rprimitive, short_int_rprimitive) 51 assert not is_subtype(int32_rprimitive, int64_rprimitive)
50 assert not is_subtype(int32_rprimitive, short_int_rprimitive) 51 assert not is_subtype(int32_rprimitive, int64_rprimitive) 52 assert not is_subtype(int32_rprimitive, int16_rprimitive)
51 assert not is_subtype(int32_rprimitive, int64_rprimitive) 52 assert not is_subtype(int32_rprimitive, int16_rprimitive) 53
54 def test_int16(self) -> None: 55 assert is_subtype(int16_rprimitive, int16_rprimitive) 56 assert is_subtype(int16_rprimitive, int_rprimitive)
55 assert is_subtype(int16_rprimitive, int16_rprimitive) 56 assert is_subtype(int16_rprimitive, int_rprimitive) 57 assert not is_subtype(int16_rprimitive, short_int_rprimitive)
56 assert is_subtype(int16_rprimitive, int_rprimitive) 57 assert not is_subtype(int16_rprimitive, short_int_rprimitive) 58 assert not is_subtype(int16_rprimitive, int64_rprimitive)
57 assert not is_subtype(int16_rprimitive, short_int_rprimitive) 58 assert not is_subtype(int16_rprimitive, int64_rprimitive) 59 assert not is_subtype(int16_rprimitive, int32_rprimitive)
58 assert not is_subtype(int16_rprimitive, int64_rprimitive) 59 assert not is_subtype(int16_rprimitive, int32_rprimitive) 60
63 def test_bit(self) -> None: 64 assert is_runtime_subtype(bit_rprimitive, bool_rprimitive) 65 assert not is_runtime_subtype(bit_rprimitive, int_rprimitive)
64 assert is_runtime_subtype(bit_rprimitive, bool_rprimitive) 65 assert not is_runtime_subtype(bit_rprimitive, int_rprimitive) 66
67 def test_bool(self) -> None: 68 assert not is_runtime_subtype(bool_rprimitive, bit_rprimitive) 69 assert not is_runtime_subtype(bool_rprimitive, int_rprimitive)
68 assert not is_runtime_subtype(bool_rprimitive, bit_rprimitive) 69 assert not is_runtime_subtype(bool_rprimitive, int_rprimitive) 70
72 bool_int_mix = RUnion([bool_rprimitive, int_rprimitive]) 73 assert not is_runtime_subtype(bool_int_mix, short_int_rprimitive) 74 assert not is_runtime_subtype(bool_int_mix, int_rprimitive)
73 assert not is_runtime_subtype(bool_int_mix, short_int_rprimitive) 74 assert not is_runtime_subtype(bool_int_mix, int_rprimitive) 75 assert not is_runtime_subtype(short_int_rprimitive, bool_int_mix)
74 assert not is_runtime_subtype(bool_int_mix, int_rprimitive) 75 assert not is_runtime_subtype(short_int_rprimitive, bool_int_mix) 76 assert not is_runtime_subtype(int_rprimitive, bool_int_mix)
75 assert not is_runtime_subtype(short_int_rprimitive, bool_int_mix) 76 assert not is_runtime_subtype(int_rprimitive, bool_int_mix) 77
80 def test_simple_type_result(self) -> None: 81 assert RUnion.make_simplified_union([int_rprimitive]) == int_rprimitive 82
83 def test_remove_duplicate(self) -> None: 84 assert RUnion.make_simplified_union([int_rprimitive, int_rprimitive]) == int_rprimitive 85
86 def test_cannot_simplify(self) -> None: 87 assert RUnion.make_simplified_union( 88 [int_rprimitive, str_rprimitive, object_rprimitive] 89 ) == RUnion([int_rprimitive, str_rprimitive, object_rprimitive]) 90
91 def test_nested(self) -> None: 92 assert RUnion.make_simplified_union( 93 [int_rprimitive, RUnion([str_rprimitive, int_rprimitive])] 94 ) == RUnion([int_rprimitive, str_rprimitive]) 95 assert RUnion.make_simplified_union(
94 ) == RUnion([int_rprimitive, str_rprimitive]) 95 assert RUnion.make_simplified_union( 96 [int_rprimitive, RUnion([str_rprimitive, RUnion([int_rprimitive])])] 97 ) == RUnion([int_rprimitive, str_rprimitive])
138 tree = result.graph[module.fullname].tree 139 assert tree is not None 140 return module, tree, result.types, mapper
144 # TODO: backport this to mypy 145 assert testcase.old_cwd is not None, "test was not properly set up" 146 testcase_path = os.path.join(testcase.old_cwd, testcase.file)
282 elif "_py" in name or "_Python" in name:
283 assert False, f"Invalid _py* suffix (should be _pythonX_Y): {name}"
284 return options
120 else: 121 assert False, "unknown error kind %d" % op.error_kind 122
125 if op.error_kind != ERR_ALWAYS: 126 assert not op.is_void, "void op generating errors?" 127
239 new = self.op_map.get(op, op) 240 assert new is not None, "use of removed op" 241 return new
366 new = self.fix_op(op.src) 367 assert isinstance(new, LoadStatic), new 368 op.src = new
118 119 assert op not in pre_live[key] 120 dest = op.dest if isinstance(op, Assign) else op
130 if dest not in pre_borrow[key] and dest in pre_live[key]: 131 assert isinstance(op, Assign), op 132 maybe_append_dec_ref(ops, dest, post_must_defined, key)
112 # Override missing methods on environments where it cannot be used like GAE. 113 import subprocess 114
40 import os.path 41 import pickle 42 from tkinter import (
186 self._root.destroy() 187 except: 188 pass 189 self._root = None
250 del self._callbacks[event][func] 251 except: 252 pass 253
530 self._root.destroy() 531 except: 532 pass 533 self._root = None
620 self._root.destroy() 621 except: 622 pass 623 self._root = None
814 with open(filename, "rb") as infile: 815 chart = pickle.load(infile) 816 name = os.path.basename(filename)
1720 del self._callbacks[event][func] 1721 except: 1722 pass 1723
2271 with open(filename, "rb") as infile: 2272 chart = pickle.load(infile) 2273 self._chart = chart
2309 with open(filename, "rb") as infile: 2310 grammar = pickle.load(infile) 2311 else:
1153 else:
1154 assert 0, f"bad scroll command {command} {args}"
1155 if showing_trace:
94 ) 95 except: 96 pass 97
52 import os 53 import pickle 54 import sys
557 pos_tuple = _pos_match((synset.pos(), None, None)) 558 assert pos_tuple is not None, "pos_tuple is null: synset.pos(): %s" % synset.pos() 559 descr = pos_tuple[2]
316 def _trees(self, edge, complete, memo, tree_class): 317 assert complete, "CCGChart cannot build incomplete trees" 318
41 def compute_composition_semantics(function, argument): 42 assert isinstance(argument, LambdaExpression), ( 43 "`" + str(argument) + "` must be a lambda expression" 44 ) 45 return LambdaExpression(
50 def compute_substitution_semantics(function, argument):
51 assert isinstance(function, LambdaExpression) and isinstance(
52 function.term, LambdaExpression
53 ), ("`" + str(function) + "` must be a lambda expression with 2 arguments")
54 assert isinstance(argument, LambdaExpression), (
53 ), ("`" + str(function) + "` must be a lambda expression with 2 arguments")
54 assert isinstance(argument, LambdaExpression), (
55 "`" + str(argument) + "` must be a lambda expression"
56 )
57
101 if match: 102 resp = random.choice(response) # pick a random response 103 resp = self._wildcards(resp, match) # process wildcards
14 import re 15 from xml.etree import ElementTree as ET 16
235 with open(annfile) as infile:
236 xml = ET.parse(infile).getroot()
237 for entity in xml.findall("document/entity"):
350 fmt = self._fmt
351 save_maxent_params(wgt, mpg, lab, aon, tab_dir=f"/tmp/english_ace_{fmt}/")
352
117 # self._logarithmic = False 118 assert encoding.length() == len(weights) 119
129 self._weights = new_weights 130 assert self._encoding.length() == len(new_weights) 131
1585 1586 def save_maxent_params(wgt, mpg, lab, aon, tab_dir="/tmp"): 1587
24 """ 25 import subprocess 26
133 raise ValueError("This function requires that numpy be installed")
134 assert explicit, "non-explicit not supported yet"
135 lines = s.strip().split("\n")
171 cmd = [_megam_bin] + args 172 p = subprocess.Popen(cmd, stdout=subprocess.PIPE) 173 (stdout, stderr) = p.communicate()
41 from platform import architecture, system 42 from subprocess import PIPE, Popen 43
137 # Run the tagger and get the output 138 p = Popen(_senna_cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE) 139 (stdout, stderr) = p.communicate(input=_input)
7 8 import subprocess 9 import sys
84 cmd = [_tadm_bin] + args 85 p = subprocess.Popen(cmd, stdout=sys.stdout) 86 (stdout, stderr) = p.communicate()
13 import re 14 import subprocess 15 import tempfile
def make_classifier(featuresets):
    """Train a C4.5 decision-tree classifier on *featuresets* via Weka.

    The trained model is written to a securely created private temporary
    file rather than the fixed, world-shared path ``/tmp/name.model``,
    which is vulnerable to symlink attacks and to collisions between
    concurrent users (Bandit B108).

    :param featuresets: labelled training data forwarded to
        ``WekaClassifier.train``.
    :return: the trained ``WekaClassifier`` instance.
    """
    import os
    import tempfile

    # mkstemp creates the file with mode 0600 and an unpredictable name.
    fd, model_path = tempfile.mkstemp(prefix="weka-", suffix=".model")
    os.close(fd)  # WekaClassifier.train reopens the path itself.
    return WekaClassifier.train(model_path, featuresets, "C4.5")
74 def cluster_vectorspace(self, vectors, trace=False): 75 assert len(vectors) > 0 76
151 m = len(mean) 152 assert cvm.shape == (m, m), "bad sized covariance matrix, %s" % str(cvm.shape) 153 try:
102 for cluster in clusters: 103 assert len(cluster) > 0 104 if self._should_normalise:
71 self._max_difference = conv_test 72 assert not initial_means or len(initial_means) == num_means 73 self._means = initial_means
73 self._means = initial_means 74 assert repeats >= 1 75 assert not (initial_means and repeats > 1)
74 assert repeats >= 1 75 assert not (initial_means and repeats > 1) 76 self._repeats = repeats
76 self._repeats = repeats 77 self._rng = rng if rng else random.Random() 78 self._avoid_empty_clusters = avoid_empty_clusters
180 ) 181 assert False 182 centroid = copy.copy(cluster[0])
40 def cluster(self, vectors, assign_clusters=False, trace=False): 41 assert len(vectors) > 0 42
201 """ 202 assert len(indices) >= 2 203 node = _DendrogramNode(self._merge, *(self._items[i] for i in indices))
65 if keys: 66 assert isinstance(keys, list) 67 assert len(data) == len(keys)
66 assert isinstance(keys, list) 67 assert len(data) == len(keys) 68 return keys
69 else: 70 assert ( 71 isinstance(data, dict) 72 or isinstance(data, OrderedDict) 73 or isinstance(data, list) 74 ) 75 if isinstance(data, dict) or isinstance(data, OrderedDict):
321 if sublist_index == (len(self._offsets) - 1): 322 assert ( 323 index + len(sublist) >= self._offsets[-1] 324 ), "offsets not monotonic increasing!" 325 self._offsets.append(index + len(sublist))
326 else: 327 assert self._offsets[sublist_index + 1] == index + len( 328 sublist 329 ), "inconsistent list value (num elts)" 330
397 try:
398 scorer = eval("BigramAssocMeasures." + sys.argv[1])
399 except IndexError:
401 try:
402 compare_scorer = eval("BigramAssocMeasures." + sys.argv[2])
403 except IndexError:
9 from warnings import warn 10 from xml.etree import ElementTree as et 11
39 self.subdiv = self.subdiv_dict(
40 et.parse(fp).iterfind("localeDisplayNames/subdivisions/subdivision")
41 )
136 137 assert None not in result 138 return result
76 else: 77 assert 0, "bad block type" 78
148 149 assert isinstance(jeita.tagged_words()[0][1], str) 150
395 word += "-" + xmlinfl.text 396 except: 397 pass 398 # if there is a suffix
434 suffixTag = xmlsuffixpos[0].text 435 except: 436 pass 437 if suffixTag:
496 ) 497 except: 498 pass 499 sents.append(word)
543 def _tree2conll(self, tree, wordnum, words, pos, synt): 544 assert isinstance(tree, Tree) 545 if len(tree) == 1 and isinstance(tree[0], str):
546 pos[wordnum] = tree.label() 547 assert words[wordnum] == tree[0] 548 return wordnum + 1
549 elif len(tree) == 1 and isinstance(tree[0], tuple): 550 assert len(tree[0]) == 2 551 pos[wordnum], pos[wordnum] = tree[0]
307 for j, k, lbl in overt:
308 assert j >= i, ("Overlapping targets?", (j, k, lbl))
309 s1 += " " * (j - i) + "-" * (k - j)
502 if not j >= i: 503 assert j >= i, ( 504 "Overlapping targets?" 505 + ( 506 " UNANN" 507 if any(aset.status == "UNANN" for aset in sent.annotationSet[1:]) 508 else "" 509 ), 510 (j, k, asetIndex), 511 ) 512 s1 += " " * (j - i) + "*" * (k - j)
543 outstr += " (" + ", ".join("=".join(pair) for pair in fAbbrevs.items()) + ")"
544 assert len(fAbbrevs) == len(dict(fAbbrevs)), "Abbreviation clash"
545
637 outstr += "(" + ", ".join("=".join(pair) for pair in feAbbrevs.items()) + ")"
638 assert len(feAbbrevs) == len(dict(feAbbrevs)), "Abbreviation clash"
639 outstr += "\n"
871 872 assert self._type 873 if len(self) == 0:
877 else: 878 assert False, self._type 879 return outstr
1465 fentry = self._handle_frame_elt(elt, ignorekeys) 1466 assert fentry 1467
1783 f = self.frame_by_id(luinfo.frameID) 1784 assert f.name == frameName, (f.name, frameName) 1785 luinfo["frame"] = f
1857 queue = list(roots) 1858 assert queue 1859 while queue:
1897 # propagate downward 1898 assert subST is None or self.semtype_inherits(subST, superST), ( 1899 superST.name, 1900 ferel, 1901 subST.name, 1902 ) 1903 if subST is None:
1912 # propagate upward 1913 assert superST is None, (superST.name, ferel, subST.name) 1914 ferel.superFE.semType = superST = subST
2558 type = [rt for rt in self.frame_relation_types() if rt.name == type][0] 2559 assert isinstance(type, dict) 2560
2858 fe.requiresFE = frinfo.FE[name] 2859 assert fe.requiresFE.ID == ID 2860 if fe.excludesFE:
2862 fe.excludesFE = frinfo.FE[name] 2863 assert fe.excludesFE.ID == ID 2864
2894 info = self._load_xml_attributes(AttrDict(), elt) 2895 assert info["superFrameName"] != info["subFrameName"], (elt, info) 2896 info["_type"] = "framerelation"
2970 2971 assert info["annotationSet"][0].status == "UNANN" 2972 info["POS"] = info["annotationSet"][0].POS
3111 if annset is not None: 3112 assert annset.status == "UNANN" or "FE" in annset, annset 3113 if annset.status != "UNANN":
3168 ): # 'Sent' and 'Other' layers sometimes contain accidental duplicate spans 3169 assert thespan not in overt, (info.ID, l.name, thespan) 3170 overt.append(thespan)
3190 continue 3191 assert all(lblname == "Target" for i, j, lblname in overt) 3192 if "Target" in info:
3201 if l.rank == 1: 3202 assert "FE" not in info 3203 info["FE"] = (overt, ni)
3206 # sometimes there are 3 FE layers! e.g. Change_position_on_a_scale.fall.v 3207 assert 2 <= l.rank <= 3, l.rank 3208 k = "FE" + str(l.rank)
3208 k = "FE" + str(l.rank) 3209 assert k not in info 3210 info[k] = (overt, ni)
3211 elif l.name in ("GF", "PT"):
3212 assert l.rank == 1
3213 info[l.name] = overt
3214 elif l.name in ("BNC", "PENN"):
3215 assert l.rank == 1
3216 info["POS"] = overt
3243 self._warn(f"Missing target in annotation set ID={info.ID}")
3244 assert "FE" in info
3245 if "FE3" in info:
3245 if "FE3" in info: 3246 assert "FE2" in info 3247
278 if lines == [""]: 279 assert not sentences 280 return []
99 100 assert m is not None 101
178 ) 179 assert isinstance(knbc.words()[0], str) 180 assert isinstance(knbc.sents()[0][0], str)
179 assert isinstance(knbc.words()[0], str) 180 assert isinstance(knbc.sents()[0][0], str) 181 assert isinstance(knbc.tagged_words()[0], tuple)
180 assert isinstance(knbc.sents()[0][0], str) 181 assert isinstance(knbc.tagged_words()[0], tuple) 182 assert isinstance(knbc.tagged_sents()[0][0], tuple)
181 assert isinstance(knbc.tagged_words()[0], tuple) 182 assert isinstance(knbc.tagged_sents()[0][0], tuple) 183
9 from functools import total_ordering 10 from xml.etree import ElementTree 11
110 with self.abspath(framefile).open() as fp:
111 etree = ElementTree.parse(fp).getroot()
112 for roleset in etree.findall("predicate/roleset"):
133 with self.abspath(framefile).open() as fp:
134 etree = ElementTree.parse(fp).getroot()
135 rsets.append(etree.findall("predicate/roleset"))
9 from functools import total_ordering 10 from xml.etree import ElementTree 11
106 with self.abspath(framefile).open() as fp:
107 etree = ElementTree.parse(fp).getroot()
108 for roleset in etree.findall("predicate/roleset"):
129 with self.abspath(framefile).open() as fp:
130 etree = ElementTree.parse(fp).getroot()
131 rsets.append(etree.findall("predicate/roleset"))
98 """ 99 assert isinstance(review_line, ReviewLine) 100 self.review_lines.append(review_line)
123 """
124 assert unit in ("token", "word", "chunk")
125 result = []
143 144 assert None not in result 145 return result
25 import re 26 from xml.etree import ElementTree 27
85 if line == "": 86 assert instance_lines == [] 87 return []
92 m = re.search("item=(\"[^\"]+\"|'[^']+')", line)
93 assert m is not None # <lexelt> has no 'item=...'
94 lexelt = m.group(1)[1:-1]
95 if lexelt_num < len(self._lexelts): 96 assert lexelt == self._lexelts[lexelt_num] 97 else:
102 if line.lstrip().startswith("<instance"):
103 assert instance_lines == []
104 in_instance = True
113 xml_block = _fixXML(xml_block) 114 inst = ElementTree.fromstring(xml_block) 115 return [self._parse_instance(inst, lexelt)]
131 # Some sanity checks: 132 assert position is None, "head specified twice" 133 assert cword.text.strip() or len(cword) == 1
132 assert position is None, "head specified twice" 133 assert cword.text.strip() or len(cword) == 1 134 assert not (cword.text.strip() and len(cword) == 1)
133 assert cword.text.strip() or len(cword) == 1 134 assert not (cword.text.strip() and len(cword) == 1) 135 # Record the position of the head:
144 else: 145 assert False, "expected CDATA or wf in <head>" 146 elif cword.tag == "wf":
152 print("ACK", cword.tag)
153 assert False, "expected CDATA or <wf> or <head>"
154 if cword.tail:
156 else: 157 assert False, "unexpected tag %s" % child.tag 158 return SensevalInstance(lexelt, position, context, senses)
411 def audiodata(self, utterance, start=0, end=None): 412 assert end is None or end > start 413 headersize = 44
10 import os 11 import pickle 12 import re
14 from functools import reduce 15 from xml.etree import ElementTree 16
306 tokens = self.read_block(self._stream) 307 assert isinstance(tokens, (tuple, list, AbstractLazySequence)), ( 308 "block reader %s() should return list or tuple." 309 % self.read_block.__name__ 310 ) 311 num_toks = len(tokens)
312 new_filepos = self._stream.tell() 313 assert ( 314 new_filepos > filepos 315 ), "block reader %s() should consume at least 1 byte (filepos=%d)" % ( 316 self.read_block.__name__, 317 filepos, 318 ) 319
323 # Update our mapping. 324 assert toknum <= self._toknum[-1] 325 if num_toks > 0:
327 if toknum == self._toknum[-1]: 328 assert new_filepos > self._filepos[-1] # monotonic! 329 self._filepos.append(new_filepos)
332 # Check for consistency: 333 assert ( 334 new_filepos == self._filepos[block_index] 335 ), "inconsistent block reader (num chars read)" 336 assert (
335 ), "inconsistent block reader (num chars read)" 336 assert ( 337 toknum + num_toks == self._toknum[block_index] 338 ), "inconsistent block reader (num tokens returned)" 339
348 # If we're at the end of the file, then we're done. 349 assert new_filepos <= self._eofpos 350 if new_filepos == self._eofpos:
356 # If we reach this point, then we should know our length. 357 assert self._len is not None 358 # Enforce closing of stream once we reached end of file
609 encoding = getattr(stream, "encoding", None) 610 assert encoding is not None or isinstance(block, str) 611 if encoding not in (None, "utf-8"):
753 path.insert(0, dirname) 754 assert os.path.split(child)[0] != child 755 return path
159 else: 160 assert False # we saw it during _index()! 161
308 else: 309 assert False, "unexpected match condition" 310
1203 index, lexname, _ = line.split() 1204 assert int(index) == i 1205 self._lexnames.append(lexname)
1428 n_synsets = int(_next_token()) 1429 assert n_synsets > 0 1430
1437 n_senses = int(_next_token()) 1438 assert n_synsets == n_senses 1439
1571 raise WordNetError(message % lemma) 1572 assert synset._pos == pos or (pos == "a" and synset._pos == "s") 1573
1613 synset = self._synset_from_pos_and_line(pos, data_file_line) 1614 assert synset._offset == offset 1615 self._synset_offset_cache[pos][offset] = synset
1702 plus = _next_token() 1703 assert plus == "+" 1704 # read the frame and lemma number
14 import codecs 15 from xml.etree import ElementTree 16
44 with self.abspath(fileid).open() as fp: 45 elt = ElementTree.parse(fp).getroot() 46 # If requested, wrap it.
302 context = list(self._tag_context.get(stream.tell())) 303 assert context is not None # check this -- could it ever happen? 304
386 if pos in self._tag_context: 387 assert tuple(context) == self._tag_context[pos] 388 else:
392 elt_handler(
393 ElementTree.fromstring(elt.encode("ascii", "xmlcharrefreplace")),
394 context,
53 54 assert issubclass(reader_cls, CorpusReader) 55 self.__name = self.__name__ = name
37 import os 38 import pickle 39 import re
966 else: 967 return urlopen(resource_url) 968
1020 zipfile.ZipFile.__init__(self, filename) 1021 assert self.filename == filename 1022 self.close()
1027 def read(self, name): 1028 assert self.fp is None 1029 self.fp = open(self.filename, "rb")
1393 check2 = "".join(self.linebuffer) 1394 assert check1.startswith(check2) or check2.startswith(check1) 1395
73 """ 74 assert inspect.ismethod(func) or inspect.isfunction(func) 75 argspec = inspect.getfullargspec(func)
131 infodict = getinfo(model) 132 assert ( 133 not "_wrapper_" in infodict["argnames"] 134 ), '"_wrapper_" is a reserved argument name!' 135 src = "lambda %(signature)s: _wrapper_(%(signature)s)" % infodict
135 src = "lambda %(signature)s: _wrapper_(%(signature)s)" % infodict 136 funcopy = eval(src, dict(_wrapper_=wrapper)) 137 return update_wrapper(funcopy, model, infodict)
198 argnames = infodict["argnames"] 199 assert not ( 200 "_call_" in argnames or "_func_" in argnames 201 ), "You cannot use _call_ or _func_ as argument names!" 202 src = "lambda %(signature)s: _call_(_func_, %(signature)s)" % infodict
203 # import sys; print >> sys.stderr, src # for debugging purposes 204 dec_func = eval(src, dict(_func_=func, _call_=caller)) 205 return update_wrapper(dec_func, func, infodict)
164 import shutil 165 import subprocess 166 import sys
174 from urllib.request import urlopen 175 from xml.etree import ElementTree 176
267 if isinstance(xml, str): 268 xml = ElementTree.parse(xml) 269 for key in xml.attrib:
307 if isinstance(xml, str): 308 xml = ElementTree.parse(xml) 309 for key in xml.attrib:
693 try: 694 infile = urlopen(info.url) 695 with open(filepath, "wb") as outfile:
936 self._index = nltk.internals.ElementWrapper( 937 ElementTree.parse(urlopen(self._url)).getroot() 938 )
936 self._index = nltk.internals.ElementWrapper( 937 ElementTree.parse(urlopen(self._url)).getroot() 938 )
1337 for c in COLUMN_WEIGHTS: 1338 assert c in COLUMNS 1339 for c in COLUMN_WIDTHS:
1339 for c in COLUMN_WIDTHS: 1340 assert c in COLUMNS 1341 for c in INITIAL_COLUMNS:
1341 for c in INITIAL_COLUMNS: 1342 assert c in COLUMNS 1343
1585 var = tkinter.IntVar(self.top) 1586 assert column not in self._column_vars 1587 self._column_vars[column] = var
1769 else: 1770 assert 0, "bad tab value %r" % self._tab 1771 rows = [self._package_to_columns(item) for item in items]
2100 # Start downloading in a separate thread. 2101 assert self._download_msg_queue == [] 2102 assert self._download_abort_queue == []
2101 assert self._download_msg_queue == [] 2102 assert self._download_abort_queue == [] 2103 self._DownloadThread(
2244 def _md5_hexdigest(fp): 2245 md5_digest = md5() 2246 while True:
2426 """ 2427 p = subprocess.Popen( 2428 ["svn", "status", "-v", filename], 2429 stdout=subprocess.PIPE, 2430 stderr=subprocess.PIPE, 2431 ) 2432 (stdout, stderr) = p.communicate()
2426 """ 2427 p = subprocess.Popen( 2428 ["svn", "status", "-v", filename], 2429 stdout=subprocess.PIPE, 2430 stderr=subprocess.PIPE, 2431 ) 2432 (stdout, stderr) = p.communicate()
2449 xmlfile = os.path.join(dirname, filename) 2450 yield ElementTree.parse(xmlfile).getroot() 2451
2477 try: 2478 pkg_xml = ElementTree.parse(xmlfilename).getroot() 2479 except Exception as e:
537 self._reset() 538 except: 539 pass 540 self._destroy()
1116 for col in self._mlb.listboxes: 1117 assert len(self) == col.size() 1118 for row in self:
1118 for row in self: 1119 assert len(row) == self._num_columns 1120 assert self._num_columns == len(self._mlb.column_names)
1119 assert len(row) == self._num_columns 1120 assert self._num_columns == len(self._mlb.column_names) 1121 # assert self._column_names == self._mlb.column_names
1125 cell = self._reprfunc(i, j, cell) 1126 assert self._mlb.get(i)[j] == cell 1127
1020 def fill(cw): 1021 cw["fill"] = "#%06d" % random.randint(0, 999999) 1022
1054 def color(node): 1055 node["color"] = "#%04d00" % random.randint(0, 9999) 1056
1057 def color2(treeseg): 1058 treeseg.label()["fill"] = "#%06d" % random.randint(0, 9999) 1059 treeseg.label().child()["color"] = "white"
532 del self.__callbacks[button] 533 except: 534 pass 535
541 del self.__callbacks["drag"] 542 except: 543 pass 544
2146 self._reset() 2147 except: 2148 pass 2149 self._destroy()
2256 for text, colortag in self._item_repr(item):
2257 assert "\n" not in text, "item repr may not contain newline"
2258 self._textwidget.insert("end", text, colortag)
2355 del self._callbacks[e][func] 2356 except: 2357 pass 2358
2529 2530 cw["fill"] = "#00%04d" % randint(0, 9999) 2531
2534 2535 cw["color"] = "#ff%04d" % randint(0, 9999) 2536
784 if reentrances[id(self)]: 785 assert id(self) not in reentrance_ids 786 reentrance_ids[id(self)] = repr(len(reentrance_ids) + 1)
836 if reentrances[id(self)]: 837 assert id(self) not in reentrance_ids 838 reentrance_ids[id(self)] = repr(len(reentrance_ids) + 1)
1052 if reentrances[id(self)]: 1053 assert id(self) not in reentrance_ids 1054 reentrance_ids[id(self)] = repr(len(reentrance_ids) + 1)
1407 ) 1408 assert isinstance(fstruct1, fs_class) 1409 assert isinstance(fstruct2, fs_class)
1408 assert isinstance(fstruct1, fs_class) 1409 assert isinstance(fstruct2, fs_class) 1410
2002 def __init__(self, name, default=None, display=None): 2003 assert display in (None, "prefix", "slash") 2004
def chomsky_normal_form(self, new_token_padding="@$@", flexible=False):
    """Return an equivalent grammar converted to Chomsky Normal Form.

    :param new_token_padding: marker used when forming the names of the
        fresh nonterminals created during binarisation.
    :param flexible: if True, stop after mixed-rule removal and skip the
        final unitary-rule elimination step.
    :raises ValueError: if the grammar contains empty (epsilon) rules,
        which this conversion does not handle.
    """
    # Already in CNF: return the grammar unchanged.
    if self.is_chomsky_normal_form():
        return self
    # Epsilon productions are not supported by this pipeline.
    if self.productions(empty=True):
        raise ValueError(
            "Grammar has Empty rules. Cannot deal with them at the moment"
        )

    # Conversion pipeline: new start symbol -> binarise -> un-mix rules.
    no_start = CFG.eliminate_start(self)
    binarized = CFG.binarize(no_start, new_token_padding)
    unmixed = CFG.remove_mixed_rules(binarized, new_token_padding)
    if flexible:
        return unmixed
    final = CFG.remove_unitary_rules(unmixed)
    # Deduplicate productions before building the resulting grammar.
    return CFG(final.start(), list(set(final.productions())))
195 for e in background: 196 assert isinstance(e, Expression) 197 self._background = background
484 for count, e in enumerate(background): 485 assert isinstance(e, Expression) 486 if verbose:
42 if model_builder is not None: 43 assert isinstance(model_builder, Mace) 44 else:
155 """ 156 assert isinstance(item, Variable) 157 for s in self:
13 import os 14 import subprocess 15
93 if prover is not None: 94 assert isinstance(prover, Prover9) 95 else:
206 pass 207 p = subprocess.Popen( 208 cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, stdin=subprocess.PIPE 209 ) 210 (stdout, stderr) = p.communicate(input=input_str)
117 if prover is not None: 118 assert isinstance(prover, ResolutionProver) 119 else:
434 """ 435 assert isinstance(a, Expression) 436 assert isinstance(b, Expression)
435 assert isinstance(a, Expression) 436 assert isinstance(b, Expression) 437
513 second = _clausify(expression.second) 514 assert len(first) == 1 515 assert len(second) == 1
514 assert len(first) == 1 515 assert len(second) == 1 516 return [first[0] + second[0]]
548 """ 549 assert isinstance(variable, Variable) 550 assert isinstance(binding, Expression)
549 assert isinstance(variable, Variable) 550 assert isinstance(binding, Expression) 551
582 """ 583 assert isinstance(variable, Variable) 584
421 if prover is not None: 422 assert isinstance(prover, TableauProver) 423 else:
14 import stat 15 import subprocess 16 import sys
19 import warnings 20 from xml.etree import ElementTree 21
139 # Call java via a subprocess 140 p = subprocess.Popen(cmd, stdin=stdin, stdout=stdout, stderr=stderr) 141 if not blocking:
230 try: 231 return eval(s[start_position : match.end()]), match.end() 232 except ValueError as e:
449 break 450 assert dep_cls, "Unable to determine which base is deprecated." 451
513 file_names = [filename] + (file_names or []) 514 assert isinstance(filename, str) 515 assert not isinstance(file_names, str)
514 assert isinstance(filename, str) 515 assert not isinstance(file_names, str) 516 assert not isinstance(searchpath, str)
515 assert not isinstance(file_names, str) 516 assert not isinstance(searchpath, str) 517 if isinstance(env_vars, str):
589 try: 590 p = subprocess.Popen( 591 ["which", alternative], 592 stdout=subprocess.PIPE, 593 stderr=subprocess.PIPE, 594 ) 595 stdout, stderr = p.communicate()
589 try: 590 p = subprocess.Popen( 591 ["which", alternative], 592 stdout=subprocess.PIPE, 593 stderr=subprocess.PIPE, 594 ) 595 stdout, stderr = p.communicate()
704 705 assert isinstance(name_pattern, str) 706 assert not isinstance(searchpath, str)
705 assert isinstance(name_pattern, str) 706 assert not isinstance(searchpath, str) 707 if isinstance(env_vars, str):
923 if isinstance(etree, str): 924 etree = ElementTree.fromstring(etree) 925 self.__dict__["_etree"] = etree
54 return seed_or_generator 55 return random.Random(seed_or_generator) 56
1333 1334 assert 0.0 <= epsilon <= 1.0, "Epsilon must be between 0.0 and 1.0." 1335 m = len(str1)
15 def revword(word): 16 if random.randint(1, 2) == 1: 17 return word[::-1]
88 for attempt in range(attempts): 89 r = random.randint(0, len(word)) 90 dir = random.choice([1, 2, 3, 4])
89 r = random.randint(0, len(word)) 90 dir = random.choice([1, 2, 3, 4]) 91 x = random.randint(0, rows)
90 dir = random.choice([1, 2, 3, 4]) 91 x = random.randint(0, rows) 92 y = random.randint(0, cols)
91 x = random.randint(0, rows) 92 y = random.randint(0, cols) 93 if dir == 1:
112 if grid[i][j] == "": 113 grid[i][j] = random.choice(alph) 114
1797 if numparses: 1798 assert len(parses) == numparses, "Not all parses found" 1799 if print_trees:
1826 if numparses: 1827 assert len(list(cp.parses())) == numparses, "Not all parses found" 1828 if print_trees:
142 try: 143 response = requests.get(requests.compat.urljoin(self.url, "live")) 144 except requests.exceptions.ConnectionError:
153 try: 154 response = requests.get(requests.compat.urljoin(self.url, "ready")) 155 except requests.exceptions.ConnectionError:
414 # Supports only 'pos' or 'ner' tags. 415 assert self.tagtype in [ 416 "pos", 417 "ner", 418 ], "CoreNLP tagger supports only 'pos' or 'ner' tags." 419 default_properties["annotators"] += self.tagtype
16 17 import subprocess 18 import warnings
324 else: 325 assert cell_number == len(cells) 326
565 if t in ["dot", "dot_json", "json", "svg"]: 566 proc = subprocess.run( 567 ["dot", "-T%s" % t], 568 capture_output=True, 569 input=dot_string, 570 text=True, 571 ) 572 else:
565 if t in ["dot", "dot_json", "json", "svg"]: 566 proc = subprocess.run( 567 ["dot", "-T%s" % t], 568 capture_output=True, 569 input=dot_string, 570 text=True, 571 ) 572 else:
572 else: 573 proc = subprocess.run( 574 ["dot", "-T%s" % t], 575 input=bytes(dot_string, encoding="utf8"), 576 ) 577 return proc.stdout
572 else: 573 proc = subprocess.run( 574 ["dot", "-T%s" % t], 575 input=bytes(dot_string, encoding="utf8"), 576 ) 577 return proc.stdout
540 if numparses: 541 assert len(parses) == numparses, "Not all parses found" 542 if print_trees:
654
655 profile.run("for i in range(1): demo()", "/tmp/profile.out")
656 import pstats
657
658 p = pstats.Stats("/tmp/profile.out")
659 p.strip_dirs().sort_stats("time", "cum").print_stats(60)
11 import os 12 import subprocess 13 import sys
74 75 assert malt_dependencies.issubset(_jars) 76 assert any(
75 assert malt_dependencies.issubset(_jars)
76 assert any(
77 filter(lambda i: i.startswith("maltparser-") and i.endswith(".jar"), _jars)
78 )
79 return list(_malt_jars)
194 os.chdir(os.path.split(self.model)[0]) 195 except: 196 pass 197 ret = self._execute(cmd, verbose) # Run command.
272 output = None if verbose else subprocess.PIPE 273 p = subprocess.Popen(cmd, stdout=output, stderr=output) 274 return p.wait()
398 def sort_queue(self, queue, chart): 399 i = random.randint(0, len(queue) - 1) 400 (queue[-1], queue[i]) = (queue[i], queue[-1])
11 import warnings 12 from subprocess import PIPE 13
8 9 import pickle 10 import tempfile
554 # First load the model 555 model = pickle.load(open(modelFile, "rb")) 556 operation = Transition(self._algorithm)
577 """ 578 p = random.random() 579 p_init = p
592 ) 593 return random.choice(list(self.samples())) 594
657 samples = set(samples) 658 randrow = [random.random() for i in range(len(samples))] 659 total = sum(randrow)
1258 """ 1259 assert bins is None or bins >= freqdist.B(), ( 1260 "bins parameter must not be less than %d=freqdist.B()" % freqdist.B() 1261 ) 1262 if bins is None:
1391 """ 1392 assert ( 1393 bins is None or bins > freqdist.B() 1394 ), "bins parameter must not be less than %d=freqdist.B()+1" % (freqdist.B() + 1) 1395 if bins is None:
1651 i = self._sample_dict.get(sample) 1652 assert i is not None 1653 if self._logs:
2436 for x in range(numoutcomes): 2437 y = random.randint(1, (1 + numsamples) // 2) + random.randint( 2438 0, numsamples // 2
2436 for x in range(numoutcomes): 2437 y = random.randint(1, (1 + numsamples) // 2) + random.randint( 2438 0, numsamples // 2 2439 ) 2440 fdist[y] += 1
33 import re 34 import subprocess 35 import tempfile
156 if discourse_ids is not None: 157 assert len(inputs) == len(discourse_ids) 158 assert reduce(operator.and_, (id is not None for id in discourse_ids))
157 assert len(inputs) == len(discourse_ids) 158 assert reduce(operator.and_, (id is not None for id in discourse_ids)) 159 use_disc_id = True
266 cmd = [binary] + args 267 p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 268 else:
270 p = subprocess.Popen( 271 cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True 272 ) 273 stdout, stderr = p.communicate() 274
303 line = lines[i]
304 assert line.startswith(f"sem({drs_id},")
305 if line[-4:] == "').'":
306 line = line[:-4] + ")."
307 assert line.endswith(")."), f"can't parse line: {line}"
308
323 break 324 assert drs_start > -1 325
777 var = self.token() 778 assert re.match(r"^[exps]\d+$", var), var 779 return var
987 raise LogicalExpressionException(self._currentIndex, str(e)) from e 988 assert False, repr(tok) 989
1540 return DRS(drs1.refs + drs2.refs, drs1.conds + drs2.conds)
1541 assert False, f"{ex.__class__.__name__}: {ex}"
1542
127 import re 128 import shelve 129 import sys
348 349 assert is_rel(self._extension) 350 if "symmetric" in self.closures:
434 for t in records:
435 cur.execute("insert into %s values (?,?,?)" % table_name, t)
436 if verbose:
614 valuation = make_valuation(concepts, read=True) 615 db_out = shelve.open(db, "n") 616
634 else: 635 db_in = shelve.open(db) 636 from nltk.sem import Valuation
209 def __or__(self, other): 210 assert isinstance(other, DrtExpression) 211 return DrtOrExpression(self, other)
213 def __gt__(self, other): 214 assert isinstance(other, DrtExpression) 215 if isinstance(self, DRS):
229 """ 230 assert isinstance(other, DrtExpression) 231
606 if self.variable == variable: 607 assert isinstance( 608 expression, DrtAbstractVariableExpression 609 ), "Can only replace a proposition label with a variable" 610 return DrtProposition(
824 drs = self.simplify() 825 assert not isinstance(drs, DrtConcatenation) 826 return drs.eliminate_equality()
308 for var, val in assign:
309 assert val in self.domain, "'{}' is not in the domain: {}".format(
310 val,
311 self.domain,
312 )
313 assert is_indvar(var), (
312 ) 313 assert is_indvar(var), ( 314 "Wrong format for an Individual Variable: '%s'" % var 315 ) 316 self[var] = val
372 """
373 assert val in self.domain, f"{val} is not in the domain {self.domain}"
374 assert is_indvar(var), "Wrong format for an Individual Variable: '%s'" % var
373 assert val in self.domain, f"{val} is not in the domain {self.domain}"
374 assert is_indvar(var), "Wrong format for an Individual Variable: '%s'" % var
375 self[var] = val
398 def __init__(self, domain, valuation): 399 assert isinstance(domain, set) 400 self.domain = domain
102 def lambda_abstract(self, other): 103 assert isinstance(other, GlueFormula) 104 assert isinstance(other.meaning, AbstractVariableExpression)
103 assert isinstance(other, GlueFormula) 104 assert isinstance(other.meaning, AbstractVariableExpression) 105 return self.__class__(
140 def __str__(self):
141 assert isinstance(self.indices, set)
142 accum = f"{self.meaning} : {self.glue}"
132 label = args[0] 133 assert label not in self.fragments 134 self.fragments[label] = (func, args[1:])
160 top_holes = self._find_top_nodes(self.holes) 161 assert len(top_holes) == 1 # it must be unique 162 return top_holes.pop()
196 else: 197 assert node in self.labels 198 # The node is a label. Replace it in the queue by the holes and
211 # Add the current hole we're trying to plug into the list of ancestors. 212 assert hole not in ancestors0 213 ancestors = [hole] + ancestors0
271 label = node 272 assert label in self.labels 273 for c in self.constraints:
274 if c.lhs == label: 275 assert c.rhs in ancestors 276 args = self.fragments[label][1]
96 """ 97 assert isinstance(name, str) 98 self.name = name
168 """ 169 assert isinstance(other, Expression) 170 if isinstance(other, VariableExpression):
189 """ 190 assert isinstance(other, Expression) 191 try:
205 """ 206 assert isinstance(antecedent, Expression) 207 assert isinstance(consequent, Expression)
206 assert isinstance(antecedent, Expression) 207 assert isinstance(consequent, Expression) 208 self.antecedent = antecedent
224 """ 225 assert isinstance(other, ImpExpression) 226 try:
299 300 assert isinstance(function_simp, ImpExpression) 301 assert isinstance(argument_simp, Expression)
300 assert isinstance(function_simp, ImpExpression) 301 assert isinstance(argument_simp, Expression) 302
389 """ 390 assert isinstance(variable, VariableExpression) 391 assert isinstance(binding, Expression)
390 assert isinstance(variable, VariableExpression) 391 assert isinstance(binding, Expression) 392
392 393 assert variable != binding 394
407 """ 408 assert isinstance(variable, VariableExpression) 409
107 """ 108 assert isinstance(type_check, bool) 109
170 tokenTrie = Trie(self.get_all_symbols()) 171 token = "" 172 data_idx = 0
197 out.append(token) 198 token = "" 199 mapping[len(out)] = data_idx
206 out.append(token) 207 token = "" 208 else:
220 def process_quoted_token(self, data_idx, data): 221 token = "" 222 c = data[data_idx]
626 """ 627 assert isinstance(name, str), "%s is not a string" % name 628 self.name = name
671 else: 672 assert False, "Cannot generate a unique constant" 673 else:
707 def __init__(self, first, second): 708 assert isinstance(first, Type), "%s is not a Type" % first 709 assert isinstance(second, Type), "%s is not a Type" % second
708 assert isinstance(first, Type), "%s is not a Type" % first 709 assert isinstance(second, Type), "%s is not a Type" % second 710 self.first = first
841 def read_type(type_string):
842 assert isinstance(type_string, str)
843 type_string = type_string.replace(" ", "") # remove spaces
845 if type_string[0] == "<": 846 assert type_string[-1] == ">" 847 paren_count = 0
852 paren_count -= 1 853 assert paren_count > 0 854 elif char == ",":
965 def applyto(self, other): 966 assert isinstance(other, Expression), "%s is not an Expression" % other 967 return ApplicationExpression(self, other)
1011 """ 1012 assert isinstance(other, Expression), "%s is not an Expression" % other 1013
1091 """ 1092 assert isinstance(variable, Variable), "%s is not a Variable" % variable 1093 assert isinstance(expression, Expression), (
1092 assert isinstance(variable, Variable), "%s is not a Variable" % variable 1093 assert isinstance(expression, Expression), ( 1094 "%s is not an Expression" % expression 1095 ) 1096
1246 """ 1247 assert isinstance(function, Expression), "%s is not an Expression" % function 1248 assert isinstance(argument, Expression), "%s is not an Expression" % argument
1247 assert isinstance(function, Expression), "%s is not an Expression" % function 1248 assert isinstance(argument, Expression), "%s is not an Expression" % argument 1249 self.function = function
1268 """:see Expression._set_type()""" 1269 assert isinstance(other_type, Type) 1270
1293 """:see Expression.findtype()""" 1294 assert isinstance(variable, Variable), "%s is not a Variable" % variable 1295 if self.is_atom():
1422 """ 1423 assert isinstance(variable, Variable), "%s is not a Variable" % variable 1424 self.variable = variable
1430 """:see: Expression.replace()""" 1431 assert isinstance(variable, Variable), "%s is not an Variable" % variable 1432 assert isinstance(expression, Expression), (
1431 assert isinstance(variable, Variable), "%s is not an Variable" % variable 1432 assert isinstance(expression, Expression), ( 1433 "%s is not an Expression" % expression 1434 ) 1435 if self.variable == variable:
1441 """:see Expression._set_type()""" 1442 assert isinstance(other_type, Type) 1443
1458 """:see Expression.findtype()""" 1459 assert isinstance(variable, Variable), "%s is not a Variable" % variable 1460 if self.variable == variable:
1496 """:see Expression._set_type()""" 1497 assert isinstance(other_type, Type) 1498
1550 """:see Expression._set_type()""" 1551 assert isinstance(other_type, Type) 1552
1586 """ 1587 assert isinstance(variable, Variable), "%s is not a Variable" % variable 1588 if is_indvar(variable.name):
1606 """ 1607 assert isinstance(variable, Variable), "%s is not a Variable" % variable 1608 assert isinstance(term, Expression), "%s is not an Expression" % term
1607 assert isinstance(variable, Variable), "%s is not a Variable" % variable 1608 assert isinstance(term, Expression), "%s is not an Expression" % term 1609 self.variable = variable
1613 """:see: Expression.replace()""" 1614 assert isinstance(variable, Variable), "%s is not a Variable" % variable 1615 assert isinstance(expression, Expression), (
1614 assert isinstance(variable, Variable), "%s is not a Variable" % variable 1615 assert isinstance(expression, Expression), ( 1616 "%s is not an Expression" % expression 1617 ) 1618 # if the bound variable is the thing being replaced
1620 if replace_bound: 1621 assert isinstance(expression, AbstractVariableExpression), ( 1622 "%s is not a AbstractVariableExpression" % expression 1623 ) 1624 return self.__class__(
1646 """ 1647 assert isinstance(newvar, Variable), "%s is not a Variable" % newvar 1648 return self.__class__(
1657 """:see Expression.findtype()""" 1658 assert isinstance(variable, Variable), "%s is not a Variable" % variable 1659 if variable == self.variable:
1697 """:see Expression._set_type()""" 1698 assert isinstance(other_type, Type) 1699
1727 """:see Expression._set_type()""" 1728 assert isinstance(other_type, Type) 1729
1768 def __init__(self, term): 1769 assert isinstance(term, Expression), "%s is not an Expression" % term 1770 self.term = term
1777 """:see Expression._set_type()""" 1778 assert isinstance(other_type, Type) 1779
1787 def findtype(self, variable): 1788 assert isinstance(variable, Variable), "%s is not a Variable" % variable 1789 return self.term.findtype(variable)
1812 def __init__(self, first, second): 1813 assert isinstance(first, Expression), "%s is not an Expression" % first 1814 assert isinstance(second, Expression), "%s is not an Expression" % second
1813 assert isinstance(first, Expression), "%s is not an Expression" % first 1814 assert isinstance(second, Expression), "%s is not an Expression" % second 1815 self.first = first
1823 """:see Expression.findtype()""" 1824 assert isinstance(variable, Variable), "%s is not a Variable" % variable 1825 f = self.first.findtype(variable)
1861 """:see Expression._set_type()""" 1862 assert isinstance(other_type, Type) 1863
1917 """:see Expression._set_type()""" 1918 assert isinstance(other_type, Type) 1919
1973 """ 1974 assert isinstance(expr, str), "%s is not a string" % expr 1975 return re.match(r"^[a-df-z]\d*$", expr) is not None
1985 """ 1986 assert isinstance(expr, str), "%s is not a string" % expr 1987 return re.match(r"^[A-Z]\d*$", expr) is not None
1997 """ 1998 assert isinstance(expr, str), "%s is not a string" % expr 1999 return re.match(r"^e\d*$", expr) is not None
63 node = syntree.label() 64 assert isinstance(node, FeatStructNonterminal) 65 try:
273 if options.model:
274 exec("import %s as model" % options.model)
275
190 with open(filename, "wb") as storage_file: 191 import pickle 192
241 """Replaces `suffix` of `word` with `replacement""" 242 assert word.endswith(suffix), "Given word doesn't end with given suffix" 243 if suffix == "":
305 trainscores = train_stats["rulescores"]
306 assert len(trainscores) == len(
307 tids
308 ), "corrupt statistics: " "{} train scores for {} rules".format(
309 trainscores, tids
310 )
311 template_counts = Counter(tids)
477 478 assert min_acc is not None or not self._rules_by_score[max_score] 479 if not self._rules_by_score[max_score]:
587 def _trace_rule(self, rule): 588 assert self._rule_scores[rule] == sum(self._positions_by_rule[rule].values()) 589
872 """ 873 assert labeled_sequences or unlabeled_sequences 874 model = None
1317 1318 rng = random.Random() 1319 rng.seed(0)
14 import os 15 from subprocess import PIPE, Popen 16
98 [self._hunpos_bin, self._hunpos_model], 99 shell=False, 100 stdin=PIPE, 101 stdout=PIPE, 102 stderr=PIPE, 103 ) 104 self._closed = False 105 106 def __del__(self):
125 for token in tokens: 126 assert "\n" not in token, "Tokens should not contain newlines" 127 if isinstance(token, str):
70
71 assert coarse in _UNIVERSAL_TAGS, f"Unexpected coarse tag: {coarse}"
72 assert (
71 assert coarse in _UNIVERSAL_TAGS, f"Unexpected coarse tag: {coarse}"
72 assert (
73 fine not in _MAPPINGS[fileid]["universal"]
74 ), f"Multiple entries for original tag: {fine}"
75
22 from abc import abstractmethod 23 from subprocess import PIPE 24
56 else:
57 assert sep not in tag, "tag may not contain sep!"
58 return f"{word}{sep}{tag}"
10 import os 11 import pickle 12 import random
255 with open(cache_baseline_tagger) as print_rules:
256 baseline_tagger = pickle.load(print_rules)
257 print(f"Reloaded pickled tagger from {cache_baseline_tagger}")
327 with open(serialize_output) as print_rules:
328 brill_tagger_reloaded = pickle.load(print_rules)
329 print(f"Reloaded pickled tagger from {serialize_output}")
29 def test_N(self): 30 assert self.bigram_counter.N() == 16 31 assert self.trigram_counter.N() == 21
30 assert self.bigram_counter.N() == 16 31 assert self.trigram_counter.N() == 21 32
33 def test_counter_len_changes_with_lookup(self): 34 assert len(self.bigram_counter) == 2 35 self.bigram_counter[50]
35 self.bigram_counter[50] 36 assert len(self.bigram_counter) == 3 37
38 def test_ngram_order_access_unigrams(self): 39 assert self.bigram_counter[1] == self.bigram_counter.unigrams 40
58 def test_bigram_counts_seen_ngrams(self): 59 assert self.bigram_counter[["a"]]["b"] == 1 60 assert self.bigram_counter[["b"]]["c"] == 1
59 assert self.bigram_counter[["a"]]["b"] == 1 60 assert self.bigram_counter[["b"]]["c"] == 1 61
62 def test_bigram_counts_unseen_ngrams(self): 63 assert self.bigram_counter[["b"]]["z"] == 0 64
65 def test_unigram_counts_seen_words(self): 66 assert self.bigram_counter["b"] == 2 67
68 def test_unigram_counts_completely_unseen_words(self): 69 assert self.bigram_counter["z"] == 0 70
80 test = NgramCounter(case) 81 assert 2 not in test 82 assert test[1] == FreqDist()
81 assert 2 not in test 82 assert test[1] == FreqDist() 83
87 88 assert not counter[3] 89 assert not counter[2]
88 assert not counter[3] 89 assert not counter[2] 90 self.case.assertCountEqual(words, counter[1].keys())
104 counter = NgramCounter([bigram_sent]) 105 assert not bool(counter[3]) 106
68 def test_mle_bigram_scores(mle_bigram_model, word, context, expected_score): 69 assert pytest.approx(mle_bigram_model.score(word, context), 1e-4) == expected_score 70
72 def test_mle_bigram_logscore_for_zero_score(mle_bigram_model):
73 assert math.isinf(mle_bigram_model.logscore("d", ["e"]))
74
96 perplexity = 2.1398 97 assert pytest.approx(mle_bigram_model.entropy(trained), 1e-4) == H 98 assert pytest.approx(mle_bigram_model.perplexity(trained), 1e-4) == perplexity
97 assert pytest.approx(mle_bigram_model.entropy(trained), 1e-4) == H 98 assert pytest.approx(mle_bigram_model.perplexity(trained), 1e-4) == perplexity 99
104 105 assert math.isinf(mle_bigram_model.entropy(untrained)) 106 assert math.isinf(mle_bigram_model.perplexity(untrained))
105 assert math.isinf(mle_bigram_model.entropy(untrained)) 106 assert math.isinf(mle_bigram_model.perplexity(untrained)) 107
124 125 assert pytest.approx(mle_bigram_model.entropy(text), 1e-4) == H 126 assert pytest.approx(mle_bigram_model.perplexity(text), 1e-4) == perplexity
125 assert pytest.approx(mle_bigram_model.entropy(text), 1e-4) == H 126 assert pytest.approx(mle_bigram_model.perplexity(text), 1e-4) == perplexity 127
153 def test_mle_trigram_scores(mle_trigram_model, word, context, expected_score): 154 assert pytest.approx(mle_trigram_model.score(word, context), 1e-4) == expected_score 155
188 def test_lidstone_bigram_score(lidstone_bigram_model, word, context, expected_score): 189 assert ( 190 pytest.approx(lidstone_bigram_model.score(word, context), 1e-4) 191 == expected_score 192 ) 193
215 perplexity = 17.0504 216 assert pytest.approx(lidstone_bigram_model.entropy(text), 1e-4) == H 217 assert pytest.approx(lidstone_bigram_model.perplexity(text), 1e-4) == perplexity
216 assert pytest.approx(lidstone_bigram_model.entropy(text), 1e-4) == H 217 assert pytest.approx(lidstone_bigram_model.perplexity(text), 1e-4) == perplexity 218
239 def test_lidstone_trigram_score(lidstone_trigram_model, word, context, expected_score): 240 assert ( 241 pytest.approx(lidstone_trigram_model.score(word, context), 1e-4) 242 == expected_score 243 ) 244
278 def test_laplace_bigram_score(laplace_bigram_model, word, context, expected_score): 279 assert ( 280 pytest.approx(laplace_bigram_model.score(word, context), 1e-4) == expected_score 281 ) 282
304 perplexity = 8.7393 305 assert pytest.approx(laplace_bigram_model.entropy(text), 1e-4) == H 306 assert pytest.approx(laplace_bigram_model.perplexity(text), 1e-4) == perplexity
305 assert pytest.approx(laplace_bigram_model.entropy(text), 1e-4) == H 306 assert pytest.approx(laplace_bigram_model.perplexity(text), 1e-4) == perplexity 307
309 def test_laplace_gamma(laplace_bigram_model): 310 assert laplace_bigram_model.gamma == 1 311
352 ): 353 assert ( 354 pytest.approx(wittenbell_trigram_model.score(word, context), 1e-4) 355 == expected_score 356 ) 357
415 ): 416 assert ( 417 pytest.approx(kneserney_trigram_model.score(word, context), 1e-4) 418 == expected_score 419 ) 420
464 ): 465 assert ( 466 pytest.approx(absolute_discounting_trigram_model.score(word, context), 1e-4) 467 == expected_score 468 ) 469
501 ): 502 assert ( 503 pytest.approx(stupid_backoff_trigram_model.score(word, context), 1e-4) 504 == expected_score 505 ) 506
545 scores_for_context = sum(model.score(w, context) for w in model.vocab) 546 assert pytest.approx(scores_for_context, 1e-7) == 1.0 547
554 def test_generate_one_no_context(mle_trigram_model): 555 assert mle_trigram_model.generate(random_seed=3) == "<UNK>" 556
559 # We don't need random_seed for contexts with only one continuation 560 assert mle_trigram_model.generate(text_seed=["c"]) == "d" 561 assert mle_trigram_model.generate(text_seed=["b", "c"]) == "d"
560 assert mle_trigram_model.generate(text_seed=["c"]) == "d" 561 assert mle_trigram_model.generate(text_seed=["b", "c"]) == "d" 562 assert mle_trigram_model.generate(text_seed=["a", "c"]) == "d"
561 assert mle_trigram_model.generate(text_seed=["b", "c"]) == "d" 562 assert mle_trigram_model.generate(text_seed=["a", "c"]) == "d" 563
566 # When context doesn't limit our options enough, seed the random choice
567 assert mle_trigram_model.generate(text_seed=("a", "<s>"), random_seed=2) == "a"
568
575 # Test that we can escape the cycle
576 assert mle_trigram_model.generate(7, text_seed=("b", "d"), random_seed=5) == [
577 "b",
578 "d",
579 "b",
580 "d",
581 "b",
582 "d",
583 "</s>",
584 ]
585
587 def test_generate_with_text_seed(mle_trigram_model):
588 assert mle_trigram_model.generate(5, text_seed=("<s>", "e"), random_seed=3) == [
589 "<UNK>",
590 "a",
591 "d",
592 "b",
593 "<UNK>",
594 ]
595
597 def test_generate_oov_text_seed(mle_trigram_model):
598 assert mle_trigram_model.generate(
599 text_seed=("aliens",), random_seed=3
600 ) == mle_trigram_model.generate(text_seed=("<UNK>",), random_seed=3)
601
608 # This will work 609 assert mle_trigram_model.generate( 610 text_seed=None, random_seed=3 611 ) == mle_trigram_model.generate(random_seed=3)
11 12 assert result == expected 13
16 17 assert result == expected 18
35 36 assert result == expected 37
40 41 assert result == expected 42
47 """
48 assert aline.delta("p", "q") == 20.0
49 assert aline.delta("a", "A") == 0.0
48 assert aline.delta("p", "q") == 20.0
49 assert aline.delta("a", "A") == 0.0
22 23 assert isinstance(tree, Tree) 24 assert (
23 assert isinstance(tree, Tree) 24 assert ( 25 tree.pformat() 26 == """ 27 (S1 28 (S 29 (NP (PRP I)) 30 (VP 31 (VBD saw) 32 (NP (DT the) (NN man)) 33 (PP (IN with) (NP (DT the) (NN telescope)))))) 34 """.strip() 35 ) 36
40 41 assert isinstance(tagged_tree, Tree) 42 assert tagged_tree.pformat() == "(S1 (NP (NN telescope)))"
41 assert isinstance(tagged_tree, Tree) 42 assert tagged_tree.pformat() == "(S1 (NP (NN telescope)))"
52 tree = cp.parse(sent) 53 assert ( 54 tree.pformat() 55 == """(S 56 The/AT 57 September-October/NP 58 term/NN 59 jury/NN 60 had/HVD 61 been/BEN 62 charged/VBN 63 by/IN 64 Fulton/NP-TL 65 Superior/JJ-TL 66 (CHUNK Court/NN-TL Judge/NN-TL Durwood/NP Pye/NP) 67 to/TO 68 investigate/VB 69 reports/NNS 70 of/IN 71 possible/JJ 72 ``/`` 73 irregularities/NNS 74 ''/'' 75 in/IN 76 the/AT 77 hard-fought/JJ 78 primary/NN 79 which/WDT 80 was/BEDZ 81 won/VBN 82 by/IN 83 (CHUNK Mayor-nominate/NN-TL Ivan/NP Allen/NP Jr./NP) 84 ./.)""" 85 )
40 pdist = classifier.prob_classify(featureset)
41 assert abs(pdist.prob("x") - px) < 1e-2, (pdist.prob("x"), px)
42 assert abs(pdist.prob("y") - py) < 1e-2, (pdist.prob("y"), py)
41 assert abs(pdist.prob("x") - px) < 1e-2, (pdist.prob("x"), px)
42 assert abs(pdist.prob("y") - py) < 1e-2, (pdist.prob("y"), py)
43
20
21 assert sorted(b.ngram_fd.items()) == [
22 (("a", "a"), 1),
23 (("a", "test"), 1),
24 (("is", "a"), 1),
25 (("is", "is"), 1),
26 (("test", "test"), 1),
27 (("this", "is"), 1),
28 (("this", "this"), 1),
29 ]
30 assert sorted(b.word_fd.items()) == [("a", 2), ("is", 2), ("test", 2), ("this", 2)]
29 ]
30 assert sorted(b.word_fd.items()) == [("a", 2), ("is", 2), ("test", 2), ("this", 2)]
31
31 32 assert len(SENT) == sum(b.word_fd.values()) == sum(b.ngram_fd.values()) + 1 33 assert close_enough(
32 assert len(SENT) == sum(b.word_fd.values()) == sum(b.ngram_fd.values()) + 1
33 assert close_enough(
34 sorted(b.score_ngrams(BigramAssocMeasures.pmi)),
35 [
36 (("a", "a"), 1.0),
37 (("a", "test"), 1.0),
38 (("is", "a"), 1.0),
39 (("is", "is"), 1.0),
40 (("test", "test"), 1.0),
41 (("this", "is"), 1.0),
42 (("this", "this"), 1.0),
43 ],
44 )
45
48 b = BigramCollocationFinder.from_words(SENT, window_size=3)
49 assert sorted(b.ngram_fd.items()) == sorted(
50 [
51 (("a", "test"), 3),
52 (("is", "a"), 3),
53 (("this", "is"), 3),
54 (("a", "a"), 1),
55 (("is", "is"), 1),
56 (("test", "test"), 1),
57 (("this", "this"), 1),
58 ]
59 )
60
60
61 assert sorted(b.word_fd.items()) == sorted(
62 [("a", 2), ("is", 2), ("test", 2), ("this", 2)]
63 )
64
64 65 assert ( 66 len(SENT) == sum(b.word_fd.values()) == (sum(b.ngram_fd.values()) + 2 + 1) / 2.0 67 ) 68 assert close_enough(
67 )
68 assert close_enough(
69 sorted(b.score_ngrams(BigramAssocMeasures.pmi)),
70 sorted(
71 [
72 (("a", "test"), 1.584962500721156),
73 (("is", "a"), 1.584962500721156),
74 (("this", "is"), 1.584962500721156),
75 (("a", "a"), 0.0),
76 (("is", "is"), 0.0),
77 (("test", "test"), 0.0),
78 (("this", "this"), 0.0),
79 ]
80 ),
81 )
82
85 b = BigramCollocationFinder.from_words(SENT, window_size=5)
86 assert sorted(b.ngram_fd.items()) == sorted(
87 [
88 (("a", "test"), 4),
89 (("is", "a"), 4),
90 (("this", "is"), 4),
91 (("is", "test"), 3),
92 (("this", "a"), 3),
93 (("a", "a"), 1),
94 (("is", "is"), 1),
95 (("test", "test"), 1),
96 (("this", "this"), 1),
97 ]
98 )
99 assert sorted(b.word_fd.items()) == sorted(
98 )
99 assert sorted(b.word_fd.items()) == sorted(
100 [("a", 2), ("is", 2), ("test", 2), ("this", 2)]
101 )
102 n_word_fd = sum(b.word_fd.values())
103 n_ngram_fd = (sum(b.ngram_fd.values()) + 4 + 3 + 2 + 1) / 4.0 104 assert len(SENT) == n_word_fd == n_ngram_fd 105 assert close_enough(
104 assert len(SENT) == n_word_fd == n_ngram_fd
105 assert close_enough(
106 sorted(b.score_ngrams(BigramAssocMeasures.pmi)),
107 sorted(
108 [
109 (("a", "test"), 1.0),
110 (("is", "a"), 1.0),
111 (("this", "is"), 1.0),
112 (("is", "test"), 0.5849625007211562),
113 (("this", "a"), 0.5849625007211562),
114 (("a", "a"), -1.0),
115 (("is", "is"), -1.0),
116 (("test", "test"), -1.0),
117 (("this", "this"), -1.0),
118 ]
119 ),
120 )
26 txt = udhr.raw(name) 27 assert not isinstance(txt, bytes), name 28
36 37 assert text_pl == expected, "Polish-Latin2" 38 assert text_ppl == expected, "Polish_Polski-Latin2"
37 assert text_pl == expected, "Polish-Latin2" 38 assert text_ppl == expected, "Polish_Polski-Latin2" 39
14 nltk.data.find(no_such_thing) 15 assert no_such_thing in str(exc)
128 ) 129 assert predicted == expected
13 download_status = download("mwa_ppdb", download_dir)
14 assert download_status is True
15
23 download_status = download("mwa_ppdb", download_dir)
24 assert download_status is True
25
37 download_status = download("stopwords", download_dir)
38 assert download_status is True
39 if i == first_download:
43 ) 44 assert print_mock.call_args_list[1].args == expected_second_call.args 45 elif i == second_download:
48 ) 49 assert print_mock.call_args_list[1].args == expected_second_call.args 50
76 package_element = xml_index[0][0]
77 assert package_element.get("id") == "test_package"
78 md5_checksum = package_element.get("checksum")
78 md5_checksum = package_element.get("checksum")
79 assert isinstance(md5_checksum, str)
80 assert len(md5_checksum) > 5
79 assert isinstance(md5_checksum, str)
80 assert len(md5_checksum) > 5
81 sha256_checksum = package_element.get("sha256_checksum")
81 sha256_checksum = package_element.get("sha256_checksum")
82 assert isinstance(sha256_checksum, str)
83 assert len(sha256_checksum) > 5
82 assert isinstance(sha256_checksum, str) 83 assert len(sha256_checksum) > 5
6 distribution = nltk.FreqDist(samples) 7 assert list(distribution) == ["two", "one"]
42 json2csv(infile, outfn, ["text"], gzip_compress=False) 43 assert files_are_identical(outfn, ref_fn) 44
62 json2csv(infile, outfn, fields, gzip_compress=False) 63 assert files_are_identical(outfn, ref_fn) 64
71 json2csv(infile, outfn, fields, gzip_compress=False) 72 assert files_are_identical(outfn, ref_fn) 73
85 ) 86 assert files_are_identical(outfn, ref_fn) 87
99 ) 100 assert files_are_identical(outfn, ref_fn) 101
114 115 assert files_are_identical(outfn, ref_fn) 116
129 130 assert files_are_identical(outfn, ref_fn) 131
144 145 assert files_are_identical(outfn, ref_fn) 146
159 160 assert files_are_identical(outfn, ref_fn) 161
174 175 assert files_are_identical(outfn, ref_fn) 176
199 200 assert files_are_identical(outfn, ref_fn) 201
209 json2csv(infile, outfn, ["text"], gzip_compress=False) 210 assert not files_are_identical(outfn, ref_fn)
45 ] 46 assert pos_tag(word_tokenize(text)) == expected_tagged 47
61 ] 62 assert pos_tag(word_tokenize(text), tagset="universal") == expected_tagged 63
88 ] 89 assert pos_tag(word_tokenize(text), lang="rus") == expected_tagged 90
101 ] 102 assert ( 103 pos_tag(word_tokenize(text), tagset="universal", lang="rus") 104 == expected_tagged 105 ) 106
116 expected_but_wrong = [("모르겠", "JJ"), ("습니", "NNP"), ("다", "NN")]
117 assert pos_tag(word_tokenize(text)) == expected_but_wrong
9 10 assert word_rank_alignment(ref, hyp) == [] 11
13 hypotheses = [hyp] 14 assert corpus_ribes(list_of_refs, hypotheses) == 0.0 15
23 24 assert word_rank_alignment(ref, hyp) == [3] 25
27 hypotheses = [hyp] 28 assert corpus_ribes(list_of_refs, hypotheses) == 0.0 29
37 38 assert word_rank_alignment(ref, hyp) == [9, 3] 39
41 hypotheses = [hyp] 42 assert corpus_ribes(list_of_refs, hypotheses) == 0.0 43
155 156 assert round(score, 4) == 0.3597 157
245 246 assert round(score, 4) == 0.1688
69 expected_output = list(filter(None, expected_output)) 70 assert test_output == expected_output 71
76
77 assert extractor.hyp_words == {"member", "China", "SCO."}
78 assert extractor.overlap("word") == set()
77 assert extractor.hyp_words == {"member", "China", "SCO."}
78 assert extractor.overlap("word") == set()
79 assert extractor.overlap("ne") == {"China"}
78 assert extractor.overlap("word") == set()
79 assert extractor.overlap("ne") == {"China"}
80 assert extractor.hyp_extra("word") == {"member"}
79 assert extractor.overlap("ne") == {"China"}
80 assert extractor.hyp_extra("word") == {"member"}
81
14 # Should open at the start of the file 15 assert reader.tell() == 0 16
17 # Compare original string to contents from `.readlines()` 18 assert unicode_string == "".join(reader.readlines()) 19
21 stream.seek(0, os.SEEK_END) 22 assert reader.tell() == stream.tell() 23
31 contents += char 32 assert unicode_string == contents 33
77 reader = SeekableUnicodeStreamReader(BytesIO(b""), "ascii") 78 assert not reader.stream.closed 79 reader.__del__()
79 reader.__del__() 80 assert reader.stream.closed 81
16 ar_stemmer = SnowballStemmer("arabic", True)
17 assert ar_stemmer.stem("الْعَرَبِــــــيَّة") == "عرب"
18 assert ar_stemmer.stem("العربية") == "عرب"
17 assert ar_stemmer.stem("الْعَرَبِــــــيَّة") == "عرب"
18 assert ar_stemmer.stem("العربية") == "عرب"
19 assert ar_stemmer.stem("فقالوا") == "قال"
18 assert ar_stemmer.stem("العربية") == "عرب"
19 assert ar_stemmer.stem("فقالوا") == "قال"
20 assert ar_stemmer.stem("الطالبات") == "طالب"
19 assert ar_stemmer.stem("فقالوا") == "قال"
20 assert ar_stemmer.stem("الطالبات") == "طالب"
21 assert ar_stemmer.stem("فالطالبات") == "طالب"
20 assert ar_stemmer.stem("الطالبات") == "طالب"
21 assert ar_stemmer.stem("فالطالبات") == "طالب"
22 assert ar_stemmer.stem("والطالبات") == "طالب"
21 assert ar_stemmer.stem("فالطالبات") == "طالب"
22 assert ar_stemmer.stem("والطالبات") == "طالب"
23 assert ar_stemmer.stem("الطالبون") == "طالب"
22 assert ar_stemmer.stem("والطالبات") == "طالب"
23 assert ar_stemmer.stem("الطالبون") == "طالب"
24 assert ar_stemmer.stem("اللذان") == "اللذان"
23 assert ar_stemmer.stem("الطالبون") == "طالب"
24 assert ar_stemmer.stem("اللذان") == "اللذان"
25 assert ar_stemmer.stem("من") == "من"
24 assert ar_stemmer.stem("اللذان") == "اللذان"
25 assert ar_stemmer.stem("من") == "من"
26 # Test where the ignore_stopwords=False.
27 ar_stemmer = SnowballStemmer("arabic", False)
28 assert ar_stemmer.stem("اللذان") == "اللذ" # this is a stop word
29 assert ar_stemmer.stem("الطالبات") == "طالب"
28 assert ar_stemmer.stem("اللذان") == "اللذ" # this is a stop word
29 assert ar_stemmer.stem("الطالبات") == "طالب"
30 assert ar_stemmer.stem("الكلمات") == "كلم"
29 assert ar_stemmer.stem("الطالبات") == "طالب"
30 assert ar_stemmer.stem("الكلمات") == "كلم"
31 # test where create the arabic stemmer without given init value to ignore_stopwords
32 ar_stemmer = SnowballStemmer("arabic")
33 assert ar_stemmer.stem("الْعَرَبِــــــيَّة") == "عرب"
34 assert ar_stemmer.stem("العربية") == "عرب"
33 assert ar_stemmer.stem("الْعَرَبِــــــيَّة") == "عرب"
34 assert ar_stemmer.stem("العربية") == "عرب"
35 assert ar_stemmer.stem("فقالوا") == "قال"
34 assert ar_stemmer.stem("العربية") == "عرب"
35 assert ar_stemmer.stem("فقالوا") == "قال"
36 assert ar_stemmer.stem("الطالبات") == "طالب"
35 assert ar_stemmer.stem("فقالوا") == "قال"
36 assert ar_stemmer.stem("الطالبات") == "طالب"
37 assert ar_stemmer.stem("الكلمات") == "كلم"
36 assert ar_stemmer.stem("الطالبات") == "طالب"
37 assert ar_stemmer.stem("الكلمات") == "كلم"
38
40 stemmer_russian = SnowballStemmer("russian")
41 assert stemmer_russian.stem("авантненькая") == "авантненьк"
42
46
47 assert stemmer_german.stem("Schr\xe4nke") == "schrank"
48 assert stemmer_german2.stem("Schr\xe4nke") == "schrank"
47 assert stemmer_german.stem("Schr\xe4nke") == "schrank"
48 assert stemmer_german2.stem("Schr\xe4nke") == "schrank"
49
49
50 assert stemmer_german.stem("keinen") == "kein"
51 assert stemmer_german2.stem("keinen") == "keinen"
50 assert stemmer_german.stem("keinen") == "kein"
51 assert stemmer_german2.stem("keinen") == "keinen"
52
55
56 assert stemmer.stem("Visionado") == "vision"
57
58 # The word 'algue' was raising an IndexError
59 assert stemmer.stem("algue") == "algu"
60
62 stemmer = SnowballStemmer("english")
63 assert stemmer.stem("y's") == "y"
64
78 our_stem = stemmer.stem(word)
79 assert (
80 our_stem == true_stem
81 ), "{} should stem to {} in {} mode but got {}".format(
82 word,
83 true_stem,
84 stemmer_mode,
85 our_stem,
86 )
87
144 """
145 assert PorterStemmer().stem("oed") == "o"
146
152 porter = PorterStemmer()
153 assert porter.stem("On") == "on"
154 assert porter.stem("I") == "i"
153 assert porter.stem("On") == "on"
154 assert porter.stem("I") == "i"
155 assert porter.stem("I", to_lowercase=False) == "I"
154 assert porter.stem("I") == "i"
155 assert porter.stem("I", to_lowercase=False) == "I"
156 assert porter.stem("Github") == "github"
155 assert porter.stem("I", to_lowercase=False) == "I"
156 assert porter.stem("Github") == "github"
157 assert porter.stem("Github", to_lowercase=False) == "Github"
156 assert porter.stem("Github") == "github"
157 assert porter.stem("Github", to_lowercase=False) == "Github"
5 result = pos_tag(word_tokenize("John's big idea isn't all that bad."))
6 assert result == [
7 ("John", "NNP"),
8 ("'s", "POS"),
9 ("big", "JJ"),
10 ("idea", "NN"),
11 ("is", "VBZ"),
12 ("n't", "RB"),
13 ("all", "PDT"),
14 ("that", "DT"),
15 ("bad", "JJ"),
16 (".", "."),
17 ]
18
59 ] 60 assert tokens == expected 61
264 predicted = tokenizer.tokenize(test_input) 265 assert predicted == expected 266
272 tokens = tokenizer.tokenize("justification")
273 assert tokens == ["jus", "ti", "fi", "ca", "tion"]
274
281 tokens = tokenizer.tokenize(text) 282 assert tokens == [text] 283
292 tokens = tokenizer.tokenize(test_word) 293 assert tokens == ["won", "der", "ful"] 294
303 segmented_sent = seg.segment(sent.split()) 304 assert segmented_sent.split() == [ 305 "يبحث", 306 "علم", 307 "الحاسوب", 308 "استخدام", 309 "الحوسبة", 310 "ب", 311 "جميع", 312 "اشكال", 313 "ها", 314 "ل", 315 "حل", 316 "المشكلات", 317 ] 318
327 segmented_sent = seg.segment(sent.split()) 328 assert segmented_sent.split() == [ 329 "这", 330 "是", 331 "斯坦福", 332 "中文", 333 "分词器", 334 "测试", 335 ] 336
346 result = tokenizer.tokenize(test1) 347 assert result == expected 348
353 result = tokenizer.tokenize(test2) 354 assert result == expected 355
365 result = tokenizer.tokenize(test1) 366 assert result == expected 367
371 result = tokenizer.tokenize(test2) 372 assert result == expected 373
401 result = tokenizer.tokenize(test3) 402 assert result == expected 403
408 result = tokenizer.tokenize(test4) 409 assert result == expected 410
413 result = tokenizer.tokenize(test5) 414 assert result == expected 415
418 result = tokenizer.tokenize(test6) 419 assert result == expected 420
440 ] 441 assert word_tokenize(text) == expected 442
465 ] 466 assert word_tokenize(text) == expected 467
478 result = tokenizer.tokenize(test1) 479 assert result == expected 480
514 result = tokenizer.tokenize(test2) 515 assert result == expected 516
559 result = tokenizer.tokenize(test3) 560 assert result == expected 561
565 result = tokenizer.tokenize(test4) 566 assert result == expected 567
597 result = tokenizer.tokenize(test5) 598 assert result == expected 599
603 result = tokenizer.tokenize(test6) 604 assert result == expected 605
618 result = tokenizer.tokenize(test7) 619 assert result == expected 620
655 result = list(tokenizer.span_tokenize(test1)) 656 assert result == expected 657
684 result = list(tokenizer.span_tokenize(test2)) 685 assert result == expected 686
715 result = list(tokenizer.span_tokenize(test3)) 716 assert result == expected 717
740 ] 741 assert word_tokenize(sentence) == expected 742
744 expected = ["'", "v", "'", "'re", "'"] 745 assert word_tokenize(sentence) == expected 746
756 757 assert actual_output == expected_output 758
792 793 assert obj.tokenize(sentences) == expected 794
803 804 assert obj.tokenize(sentences) == expected 805
827 828 assert len(tokenizer.tokenize(input_text)) == n_sents 829 assert len(list(tokenizer.debug_decisions(input_text))) == n_splits
828 assert len(tokenizer.tokenize(input_text)) == n_sents 829 assert len(list(tokenizer.debug_decisions(input_text))) == n_splits 830
872 def test_sent_tokenize(self, sentences: str, expected: List[str]): 873 assert sent_tokenize(sentences) == expected 874
877 tokenizer = CharTokenizer() 878 assert tokenizer.tokenize(sentence) == list(sentence) 879 assert list(tokenizer.span_tokenize(sentence)) == [
878 assert tokenizer.tokenize(sentence) == list(sentence) 879 assert list(tokenizer.span_tokenize(sentence)) == [ 880 (0, 1), 881 (1, 2), 882 (2, 3), 883 (3, 4), 884 (4, 5), 885 (5, 6), 886 (6, 7), 887 (7, 8), 888 (8, 9), 889 (9, 10), 890 (10, 11), 891 ] 892
33 fn = os.path.basename(auth.creds_subdir) 34 assert fn == os.environ["TWITTER"] 35
74 75 assert auth.creds_fullpath == os.path.join(self.subdir, auth.creds_file) 76 assert auth.creds_file == "credentials.txt"
75 assert auth.creds_fullpath == os.path.join(self.subdir, auth.creds_file) 76 assert auth.creds_file == "credentials.txt" 77 assert oauth["app_key"] == "a"
76 assert auth.creds_file == "credentials.txt" 77 assert oauth["app_key"] == "a"
21 output = list(everygrams(everygram_input)) 22 assert output == expected_output 23
33 output = list(everygrams(everygram_input, max_len=2)) 34 assert output == expected_output 35
43 output = list(everygrams(everygram_input, min_len=2)) 44 assert output == expected_output 45
62 output = list(everygrams(everygram_input, max_len=3, pad_right=True)) 63 assert output == expected_output 64
81 output = list(everygrams(everygram_input, max_len=3, pad_left=True)) 82 assert output == expected_output
36 hyp1_unigram_precision = float(modified_precision(references, hyp1, n=1)) 37 assert round(hyp1_unigram_precision, 4) == 0.2857 38 # With assertAlmostEqual at 4 place precision.
41 # Testing modified bigram precision. 42 assert float(modified_precision(references, hyp1, n=2)) == 0.0 43
62 # Testing modified unigram precision. 63 assert float(modified_precision(references, hyp1, n=1)) == 1.0 64
65 # Testing modified bigram precision. 66 assert float(modified_precision(references, hyp1, n=2)) == 1.0 67
86 # Test unigram precision with rounding. 87 assert round(hyp1_unigram_precision, 4) == 0.9444 88 assert round(hyp2_unigram_precision, 4) == 0.5714
87 assert round(hyp1_unigram_precision, 4) == 0.9444 88 assert round(hyp2_unigram_precision, 4) == 0.5714 89
96 # Test bigram precision with rounding. 97 assert round(hyp1_bigram_precision, 4) == 0.5882 98 assert round(hyp2_bigram_precision, 4) == 0.0769
97 assert round(hyp1_bigram_precision, 4) == 0.5882 98 assert round(hyp2_bigram_precision, 4) == 0.0769 99
114 closest_ref_len = closest_ref_length(references, hyp_len) 115 assert brevity_penalty(closest_ref_len, hyp_len) == 1.0 116
124 weights = (1.0 / n,) * n # Uniform weights. 125 assert sentence_bleu(references, hypothesis, weights) == 0 126
134 weights = (1.0 / n,) * n # Uniform weights. 135 assert sentence_bleu(references, hypothesis, weights) == 1.0 136
182 hypothesis = [] 183 assert sentence_bleu(references, hypothesis) == 0 184
198 hypothesis = "John loves Mary".split() 199 assert sentence_bleu(references, hypothesis) == 0 200
204 hypothesis = [] 205 assert sentence_bleu(references, hypothesis) == 0 206
226 weights = np.array([0.25] * 4) 227 assert sentence_bleu(references, hypothesis, weights) == 0 228
258 # the actual translation quality might not be. 259 assert abs(mteval_bleu - nltk_bleu) < 0.005 260
269 ) 270 assert abs(mteval_bleu - nltk_bleu) < 0.005 271
407 ) 408 assert bleu_scores[0] == corpus_bleu( 409 [[ref1a, ref1b, ref1c], [ref2a]], [hyp1, hyp2], weight_1 410 ) 411 assert bleu_scores[1] == corpus_bleu(
410 ) 411 assert bleu_scores[1] == corpus_bleu( 412 [[ref1a, ref1b, ref1c], [ref2a]], [hyp1, hyp2], weight_2 413 ) 414 assert bleu_scores[2] == corpus_bleu(
413 ) 414 assert bleu_scores[2] == corpus_bleu( 415 [[ref1a, ref1b, ref1c], [ref2a]], [hyp1, hyp2], weight_3 416 )
11 score = meteor_score(self.reference, self.candidate, preprocess=str.lower) 12 assert score == 0.9921875 13
35 # Check that the NIST scores difference is less than 0.5 36 assert abs(mteval_nist - nltk_nist) < 0.05
595 596 assert length > 0, "The `length` must be more than 0." 597 while len(generated_tokens) < length:
602 ): 603 if token == "<s>": 604 continue
604 continue 605 if token == "</s>": 606 break
309 """ 310 assert len(tokens) == 1 311 assert tokens[0][0] == "@"
310 assert len(tokens) == 1 311 assert tokens[0][0] == "@" 312 macro_name = tokens[0][1:]
331 # disjunctive definition of a node name 332 assert list(set(tokens[1::2])) == ["|"] 333 # recursively call self to interpret each node name definition
344 elif tokens[0].startswith('"'):
345 assert tokens[0].endswith('"')
346 node_lit = tokens[0][1:-1].replace('\\"', '"').replace("\\\\", "\\")
350 elif tokens[0].startswith("/"):
351 assert tokens[0].endswith("/")
352 node_lit = tokens[0][1:-1]
375 """
376 assert len(tokens) == 3
377 assert tokens[0] == "("
376 assert len(tokens) == 3
377 assert tokens[0] == "("
378 assert tokens[2] == ")"
377 assert tokens[0] == "("
378 assert tokens[2] == ")"
379 return tokens[1]
409 # process square-bracketed relation expressions 410 assert len(tokens) == 3 411 assert tokens[2] == "]"
410 assert len(tokens) == 3 411 assert tokens[2] == "]" 412 retval = tokens[1]
414 # process operator-node relation expressions 415 assert len(tokens) == 2 416 operator, predicate = tokens
719 """
720 assert len(tokens) == 1
721 assert tokens[0].startswith("=")
720 assert len(tokens) == 1
721 assert tokens[0].startswith("=")
722 return tokens[0][1:]
738 """
739 assert len(tokens) == 1
740 assert tokens[0].startswith("=")
739 assert len(tokens) == 1
740 assert tokens[0].startswith("=")
741 node_label = tokens[0][1:]
770 # a tgrep_node_label, a string value containing the node label 771 assert len(tokens) == 3 772 assert tokens[1] == "="
771 assert len(tokens) == 3 772 assert tokens[1] == "=" 773 node_pred = tokens[0]
811 """ 812 assert len(tokens) == 3 813 assert tokens[0] == "@"
812 assert len(tokens) == 3
813 assert tokens[0] == "@"
814 return {tokens[1]: tokens[2]}
1568 # Move our position pointer to the end of the token. 1569 assert text[pos : pos + len(tok)] == tok 1570 pos += len(tok)
1590 print("writing to /tmp/punkt.new...")
1591 with open("/tmp/punkt.new", "w") as outfile:
1592 for aug_tok in tokens:
def save_params(self):
    # Persist this trainer's learned parameters to disk, one directory per
    # language. NOTE(review): writes under a predictable world-readable
    # /tmp path — presumably debug-only tooling; confirm before relying on
    # it in shared or multi-user environments.
    save_punkt_params(self._params, dir=f"/tmp/{self._lang}")
1755
1773 1774 def save_punkt_params(params, dir="/tmp/punkt_tab"): 1775 from os import mkdir
11 import re 12 import subprocess 13 import sys
112 def _execute(cmd): 113 p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) 114 stdout, stderr = p.communicate()
146 # Checks for the REPP binary and erg/repp.set config file. 147 assert os.path.exists(_repp_dir + "/src/repp") 148 assert os.path.exists(_repp_dir + "/erg/repp.set")
147 assert os.path.exists(_repp_dir + "/src/repp") 148 assert os.path.exists(_repp_dir + "/erg/repp.set") 149 return _repp_dir
12 import warnings 13 from subprocess import PIPE 14
16 import warnings 17 from subprocess import PIPE 18
450 else:
451 w = eval("numpy." + window + "(window_len)")
452
8 from re import finditer 9 from xml.sax.saxutils import escape, unescape 10
15 from io import StringIO 16 from xml.etree.ElementTree import Element, ElementTree, SubElement, TreeBuilder 17
10 11 import subprocess 12 from collections import namedtuple
52 else: 53 assert type(alignment) is Alignment 54 self.alignment = alignment
131 try: 132 process = subprocess.Popen( 133 ["dot", "-T%s" % output_format], 134 stdin=subprocess.PIPE, 135 stdout=subprocess.PIPE, 136 stderr=subprocess.PIPE, 137 ) 138 except OSError as e:
131 try: 132 process = subprocess.Popen( 133 ["dot", "-T%s" % output_format], 134 stdin=subprocess.PIPE, 135 stdout=subprocess.PIPE, 136 stderr=subprocess.PIPE, 137 ) 138 except OSError as e:
283 284 assert type(alignment) is Alignment 285
219 220 assert len(list_of_references) == len(hypotheses), ( 221 "The number of hypotheses and their reference(s) should be the " "same " 222 ) 223
691 # to use this smoothing technique. 692 assert p_n[2], "This smoothing method requires non-zero precision for bigrams." 693 for i, p_i in enumerate(p_n):
190 191 assert len(references) == len( 192 hypotheses 193 ), "The number of hypotheses and their references should be the same" 194 num_sents = len(hypotheses)
145 # sanity check 146 assert len(list_of_references) == len( 147 hypotheses 148 ), "The number of hypotheses and their reference(s) should be the same" 149
319 320 assert len(references) == len(hypothesis), ( 321 "The number of hypothesis and their reference(s) should be the " "same " 322 ) 323
36 else: 37 assert reference.issubset(possible) # sanity check 38
85 # Before proceeding to compute NIST, perform sanity checks. 86 assert len(list_of_references) == len( 87 hypotheses 88 ), "The number of hypotheses and their reference(s) should be the same" 89
341 return i 342 assert False, "expected to find self in self._parent!" 343
384 # Sanity checks 385 assert isinstance(child, ParentedTree) 386 assert self[index] is child
385 assert isinstance(child, ParentedTree) 386 assert self[index] is child 387 assert child._parent is self
386 assert self[index] is child 387 assert child._parent is self 388
558 # Sanity checks 559 assert isinstance(child, MultiParentedTree) 560 assert self[index] is child
559 assert isinstance(child, MultiParentedTree) 560 assert self[index] is child 561 assert len([p for p in child._parents if p is self]) == 1
560 assert self[index] is child 561 assert len([p for p in child._parents if p is self]) == 1 562
246 i = int(tree[m]) * scale 247 assert matrix[0][i] is None, (matrix[0][i], m, i) 248 matrix[0][i] = ids[m]
294 childcols[m[:-1]].add((rowidx, i)) 295 assert len(positions) == 0 296
690 else: 691 assert stack[0][0] is None 692 assert len(stack[0][1]) == 1
691 assert stack[0][0] is None 692 assert len(stack[0][1]) == 1 693 tree = stack[0][1][0]
1153 if offset > end: 1154 assert end != middle - 1, "infinite loop" 1155 end = middle - 1
1158 elif line > key: 1159 assert end != middle - 1, "infinite loop" 1160 end = middle - 1
1179
def set_proxy(proxy, user=None, password=""):
    """
    Set the HTTP proxy for Python to download through.

    If ``proxy`` is None then tries to set proxy from environment or system
    settings.

    :param proxy: The HTTP proxy server to use. For example:
        'http://proxy.example.com:3128/'
    :param user: The username to authenticate with. Use None to disable
        authentication.
    :param password: The password to authenticate with.
    """
    if proxy is None:
        # No explicit proxy given: fall back to the system-wide settings.
        try:
            proxy = getproxies()["http"]
        except KeyError as e:
            raise ValueError("Could not detect default proxy settings") from e

    # Route both plain HTTP and HTTPS traffic through the same proxy.
    proxy_routes = {"https": proxy, "http": proxy}
    url_opener = build_opener(ProxyHandler(proxy_routes))

    if user is not None:
        # Credentials supplied: register basic and digest auth handlers
        # backed by a shared password manager.
        pwd_mgr = HTTPPasswordMgrWithDefaultRealm()
        pwd_mgr.add_password(realm=None, uri=proxy, user=user, passwd=password)
        url_opener.add_handler(ProxyBasicAuthHandler(pwd_mgr))
        url_opener.add_handler(ProxyDigestAuthHandler(pwd_mgr))

    # Make this opener the module-global default used by urlopen().
    install_opener(url_opener)
1213
70 # ensure the endpoint is callable 71 assert callable(endpoint) 72 # classes are not always callable, make an extra check
74 obj = endpoint() 75 assert callable(obj) 76
158 def decorator(endpoint): 159 assert patterns 160 for pat in patterns:
95 version_string: str | None = os.confstr("CS_GNU_LIBC_VERSION")
96 assert version_string is not None
97 _, version = version_string.rsplit()
10 import re 11 import subprocess 12 import sys
51 return None 52 proc = subprocess.run([ld], stderr=subprocess.PIPE, text=True) 53 return _parse_musl_version(proc.stderr)
78 plat = sysconfig.get_platform()
79 assert plat.startswith("linux-"), "not linux"
80
122 """
123 assert self.next_token is None, (
124 f"Cannot check for {name!r}, already have {self.next_token!r}"
125 )
126 assert name in self.rules, f"Unknown token name: {name!r}"
125 )
126 assert name in self.rules, f"Unknown token name: {name!r}"
127
148 token = self.next_token 149 assert token is not None 150
89 python_tokens.append("False")
90 elif token == "with":
91 python_tokens.append("or")
91 python_tokens.append("or")
92 elif token == "(" and python_tokens and python_tokens[-1] not in {"or", "and"}:
93 message = f"Invalid license expression: {raw_license_expression!r}"
99 try: 100 invalid = eval(python_expression, globals(), locals()) 101 except Exception:
142 ) -> str: 143 assert isinstance(marker, (list, tuple, str)) 144
203 if key == "extra": 204 assert isinstance(rhs, str), "extra value must be a string" 205 return (canonicalize_name(lhs), canonicalize_name(rhs))
221 for marker in markers: 222 assert isinstance(marker, (list, tuple, str)) 223
236 rhs_value = environment[environment_key] 237 assert isinstance(lhs_value, str), "lhs must be a string" 238 lhs_value, rhs_value = _normalize(lhs_value, rhs_value, key=environment_key)
240 else: 241 assert marker in ["and", "or"] 242 if marker == "or":
225 payload = msg.get_payload() 226 assert isinstance(payload, str) 227 return payload
231 bpayload = msg.get_payload(decode=True) 232 assert isinstance(bpayload, bytes) 233 try:
336 # a str, so we'll just assert here to make sure. 337 assert isinstance(h, (email.header.Header, str)) 338
10 import struct 11 import subprocess 12 import sys
413 # instead of the real version.
414 version_str = subprocess.run(
415 [
416 sys.executable,
417 "-sS",
418 "-c",
419 "import platform; print(platform.mac_ver()[0])",
420 ],
421 check=True,
422 env={"SYSTEM_VERSION_COMPAT": "0"},
423 stdout=subprocess.PIPE,
424 text=True,
425 ).stdout
426 version = cast("AppleVersion", tuple(map(int, version_str.split(".")[:2])))
496 497 assert not letter 498 if number:
331 else: 332 assert action == _RECORD 333 # 'content' is record key
344 # (otherwise _source & _records are somehow out of sync) 345 assert not pending, "failed to write all records: missing=%r" % (pending,) 346
401 """ 402 assert isinstance(value, bytes), "expected value to be bytes" 403 if self.return_unicode:
863 # rehash user's password if old hash was deprecated 864 assert user in self._records # otherwise would have to use ._set_record() 865 self._records[user] = new_hash
307 if context: 308 assert isinstance(context, CryptContext) 309 self._context = context
310 self._stub_policy = kwds.pop("_stub_policy", False)
311 assert not (args or kwds), "unexpected args: %r %r" % (args,kwds)
312 else:
1015 # using private attrs to store some extra metadata in custom handler 1016 assert subcls is not handler, "expected unique variant of handler" 1017 ##subcls._Context__category = category
1073 default = self.default_scheme(category) 1074 assert default 1075 record = self._records[None, category] = self.get_record(default,
1403 else: 1404 assert not kwds, "_autoload=False and kwds are mutually exclusive" 1405
1612 # split string into 1-3 parts
1613 assert isinstance(ckey, native_string_types)
1614 parts = ckey.replace(".", "__").split("__")
1883 1884 assert isinstance(value, native_string_types), \ 1885 "expected string for key: %r %r" % (key, value) 1886
2445 #: secret used for dummy_verify() 2446 _dummy_secret = "too many secrets" 2447
2516 record = self._config.disabled_record 2517 assert record.is_disabled 2518 return record.disable(hash)
101 # parse ident 102 assert isinstance(ident, native_string_types) 103 add_null_padding = True
114 # decode & validate salt 115 assert isinstance(salt, bytes) 116 salt = bcrypt64.decode_bytes(salt)
122 # prepare password 123 assert isinstance(password, bytes) 124 if add_null_padding:
323 truncating data as needed to reach specified size""" 324 assert isinstance(data, bytes) 325 dlen = len(data)
357 """perform stock Blowfish keyschedule setup""" 358 assert len(key_words) >= 18, "key_words must be at least as large as P" 359 P, S, encipher = self.P, self.S, self.encipher
384 385 assert len(key_words) >= 18, "key_words must be at least as large as P" 386 salt_size = len(salt_words)
386 salt_size = len(salt_words) 387 assert salt_size, "salt_words must not be empty" 388 assert not salt_size & 1, "salt_words must have even length"
387 assert salt_size, "salt_words must not be empty" 388 assert not salt_size & 1, "salt_words must have even length" 389 P, S, encipher = self.P, self.S, self.encipher
226 else: 227 assert len(block) == 64 228 self._process(block)
648 offset += 7 649 assert not (result & ~INT_64_MASK) 650 return result
304 name = name_list[0] 305 assert name 306
362 if name: # (skips iana name if it's empty) 363 assert cache.get(name) in [None, info], "%r already in cache" % name 364 cache[name] = info
481 const() 482 assert "shouldn't get here" 483 self.error_text = msg
661 const, digest_size, block_size = digest_info 662 assert block_size >= 16, "block size too small" 663
999 ldict = dict() 1000 eval(code, gdict, ldict) 1001 helper = ldict['helper']
1013 else: 1014 assert _force_backend in ["any", "hexlify"] 1015
66 self.bmix_half_len = r << 4
67 assert struct.calcsize("I") == 4
68 self.bmix_struct = struct.Struct("<" + str(bmix_len) + "I")
81 else: 82 assert n <= 0xFFFFffffFFFFffff 83 ig1 = operator.itemgetter(-16)
473 if get_user_category: 474 assert callable(get_user_category) 475 self.get_user_category = get_user_category
1192 """monkeypatch object+attr at <path> to have <value>, stores original""" 1193 assert value != _UNSET 1194 current = self._get_path(path)
1205 if wrap: 1206 assert callable(value) 1207 wrapped = orig
455 'please use ``type="d"`` instead') 456 assert type is None 457 type = TYPE_D
469 if type is None: 470 assert uh.validate_default_value(self, self.type, self._norm_type, param="type") 471 else:
475 if version is None: 476 assert uh.validate_default_value(self, self.version, self._norm_version, 477 param="version") 478 else:
482 if memory_cost is None: 483 assert uh.validate_default_value(self, self.memory_cost, self._norm_memory_cost, 484 param="memory_cost") 485 else:
489 if data is None: 490 assert self.data is None 491 else:
597 max_version = mixin_cls.max_version 598 assert isinstance(max_version, int) and max_version >= 0x10 599 if max_version < 0x13:
706 # make sure we write info to base class's __dict__, not that of a subclass 707 assert mixin_cls is _CffiBackend 708
725 # TYPE_ID support not added until v18.2 726 assert type not in (TYPE_I, TYPE_D), "unexpected missing type: %r" % type 727 mixin_cls._backend_type_map = type_map
773 result = _argon2_cffi.low_level.verify_secret(hash, secret, type_code) 774 assert result is True 775 return True
830 # make sure we write info to base class's __dict__, not that of a subclass 831 assert mixin_cls is _PureBackend 832
862 # TYPE_ID support not added until v1.3 863 assert type not in (TYPE_I, TYPE_D), "unexpected missing type: %r" % type 864 mixin_cls._backend_type_map = type_map
237 salt = super(_BcryptCommon, cls)._norm_salt(salt, **kwds) 238 assert salt is not None, "HasSalt didn't generate new salt!" 239 changed, salt = bcrypt64.check_repair_unused(salt)
287 #---------------------------------------------------------------- 288 assert mixin_cls is bcrypt._backend_mixin_map[backend], \ 289 "_configure_workarounds() invoked from wrong class" 290
655 hash = _bcrypt.hashpw(secret, config) 656 assert isinstance(hash, bytes) 657 if not hash.startswith(config) or len(hash) != len(config)+31:
213 secret += spoil_digest 214 digest = md5(secret).digest() 215
374 salt = self._generate_salt() 375 assert self._norm_salt(salt) == salt, "generated invalid salt: %r" % (salt,) 376 else:
44 """pure-python backed for des_crypt""" 45 assert len(salt) == 2 46
57 secret = secret.encode("utf-8")
58 assert isinstance(secret, bytes)
59
93 secret = secret.encode("utf-8")
94 assert isinstance(secret, bytes)
95
125 data = render_bytes("%s:%s:%s", user, realm, secret)
126 return hashlib.md5(data).hexdigest()
127
123 secret = secret.encode("utf-8")
124 return str_to_uascii(sha1(self.salt.encode("ascii") + secret).hexdigest())
125
161 secret = secret.encode("utf-8")
162 return str_to_uascii(md5(self.salt.encode("ascii") + secret).hexdigest())
163
123 variant = self.default_variant 124 assert self._norm_variant(variant) == variant, "invalid default variant: %r" % (variant,) 125 else:
88 cs = cls.checksum_size 89 assert cs 90 return cls(checksum=data[:cs], salt=data[cs:])
71 pwd = pwd.encode("utf-8")
72 assert isinstance(pwd, bytes), "pwd not unicode or bytes"
73 if _BNULL in pwd:
77 # validate salt - should have been taken care of by caller
78 assert isinstance(salt, unicode), "salt not unicode"
79 salt = salt.encode("ascii")
79 salt = salt.encode("ascii")
80 assert len(salt) < 9, "salt too large"
81 # NOTE: spec says salts larger than 8 bytes should be truncated,
93 #=================================================================== 94 db = md5(pwd + salt + pwd).digest() 95
99 # start out with pwd + magic + salt 100 a_ctx = md5(pwd + magic + salt) 101 a_ctx_update = a_ctx.update
176 for even, odd in data: 177 dc = md5(odd + md5(dc + even).digest()).digest() 178 blocks -= 1
176 for even, odd in data: 177 dc = md5(odd + md5(dc + even).digest()).digest() 178 blocks -= 1
181 for even, odd in data[:17]: 182 dc = md5(odd + md5(dc + even).digest()).digest() 183
181 for even, odd in data[:17]: 182 dc = md5(odd + md5(dc + even).digest()).digest() 183
166 marker = cls.default_marker 167 assert marker and cls.identify(marker) 168 return to_native_str(marker, param="marker")
57 def _raw_mssql(secret, salt): 58 assert isinstance(secret, unicode) 59 assert isinstance(salt, bytes)
58 assert isinstance(secret, unicode)
59 assert isinstance(salt, bytes)
60 return sha1(secret.encode("utf-16-le") + salt).digest()
59 assert isinstance(salt, bytes)
60 return sha1(secret.encode("utf-16-le") + salt).digest()
61
89 # assumes ascii-compat encoding 90 assert isinstance(hash, bytes) 91 if len(hash) == csize and hash.startswith(BIDENT):
119 secret = secret.encode("utf-8")
120 return str_to_uascii(sha1(sha1(secret).digest()).hexdigest()).upper()
121
119 secret = secret.encode("utf-8")
120 return str_to_uascii(sha1(sha1(secret).digest()).hexdigest()).upper()
121
162 secret = secret.encode("utf-8")
163 chk = sha1(secret + unhexlify(self.salt.encode("ascii"))).hexdigest()
164 return str_to_uascii(chk).upper()
121 real_rounds = 1<<self.rounds
122 result = md5(self.salt.encode("ascii") + secret).digest()
123 r = 0
124 while r < real_rounds: 125 result = md5(result + secret).digest() 126 r += 1
46 user = to_bytes(self.user, "utf-8", param="user") 47 return str_to_uascii(md5(secret + user).hexdigest()) 48
284 if algs is not None: 285 assert default_algs is None 286 default_algs = algs
312 algs = list(self.default_algs) 313 assert self._norm_algs(algs) == algs, "invalid default algs: %r" % (algs,) 314 else:
214 nstr, bstr, pstr = parts
215 assert nstr.startswith("ln=")
216 assert bstr.startswith("r=")
215 assert nstr.startswith("ln=")
216 assert bstr.startswith("r=")
217 assert pstr.startswith("p=")
216 assert bstr.startswith("r=")
217 assert pstr.startswith("p=")
218 else:
285 else: 286 assert ident == IDENT_7 287 salt = self.salt
309 if block_size is None: 310 assert uh.validate_default_value(self, self.block_size, self._norm_block_size, 311 param="block_size") 312 else:
91 pwd = pwd.encode("utf-8")
92 assert isinstance(pwd, bytes)
93 if _BNULL in pwd:
97 # validate rounds 98 assert 1000 <= rounds <= 999999999, "invalid rounds" 99 # NOTE: spec says out-of-range rounds should be clipped, instead of
103 # validate salt
104 assert isinstance(salt, unicode), "salt not unicode"
105 salt = salt.encode("ascii")
106 salt_len = len(salt) 107 assert salt_len < 17, "salt too large" 108 # NOTE: spec says salts larger than 16 bytes should be truncated,
160 dp = repeat_string(tmp_ctx.digest(), pwd_len) 161 assert len(dp) == pwd_len 162
166 ds = hash_const(salt * (16 + byte_elem_value(da[0]))).digest()[:salt_len] 167 assert len(ds) == salt_len, "salt_len somehow > hash_len!" 168
307 raise uh.exc.InvalidHashError(cls) 308 assert len(ident) == 3 309 parts = hash[3:].split(_UDOLLAR)
312 if parts[0].startswith(_UROUNDS): 313 assert len(_UROUNDS) == 7 314 rounds = parts.pop(0)[7:]
87 global MAGIC_HAMLET 88 assert isinstance(secret, bytes) 89 assert isinstance(salt, bytes)
88 assert isinstance(secret, bytes) 89 assert isinstance(salt, bytes) 90
98 # NOTE: algorithm 'salt' includes full config string w/ trailing "$" 99 result = md5(secret + salt).digest() 100 assert len(result) == 16
99 result = md5(secret + salt).digest() 100 assert len(result) == 16 101
150 # construct hash for this round 151 h = md5(result) 152 if coin:
243 # make sure subclass set things up correctly 244 assert self.symbol_count is not None, "subclass must set .symbol_count" 245
390 charset = self.charset 391 assert charset 392 chars = default_charsets[charset]
646 wordset = self.wordset 647 assert wordset 648 words = default_wordsets[wordset]
326 # normalize name (and if changed, check dict again)
327 assert isinstance(name, unicode_or_str), "name must be string instance"
328 alt = name.replace("-","_").lower()
358 # XXX: issue deprecation warning here? 359 assert is_crypt_handler(handler), "unexpected object: name=%r object=%r" % (name, handler) 360 return handler
8 import os 9 import subprocess 10 # site
48 stdin = stdin.encode("utf-8")
49 proc = subprocess.Popen([htpasswd_path] + args, stdout=subprocess.PIPE,
50 stderr=subprocess.STDOUT, stdin=subprocess.PIPE if stdin else None)
51 out, err = proc.communicate(stdin)
1173 check_state.append((hash, secret)) 1174 return secret == "nu" 1175
1179 secret = secret.encode("utf-8")
1180 return str_to_uascii(md5(secret).hexdigest())
1181
1189 # now with a password 1190 self.assertFalse(ctx.needs_update(hash, secret='bob')) 1191 self.assertEqual(check_state, [(hash,'bob')])
1194 # now when it returns True 1195 self.assertTrue(ctx.needs_update(hash, secret='nu')) 1196 self.assertEqual(check_state, [(hash,'nu')])
1736 secret = secret.encode("utf-8")
1737 return str_to_uascii(hashlib.sha1(b"prefix" + secret).hexdigest())
1738
69 # make sure sample_config_1s uses \n linesep - tests rely on this
70 assert sample_config_1s.startswith("[passlib]\nschemes")
71
111 try:
112 hashlib.new("md4")
113 return True
86 try:
87 hashlib.new("sha")
88 has_sha = True
102 try:
103 hashlib.new("md4")
104 has_md4 = True
37 count = len(data) // 4
38 assert check_count is None or check_count == count
39 return struct.unpack("<%dI" % count, data)
312 def setUp(self): 313 assert self.backend 314 scrypt_mod._set_backend(self.backend)
129 """ 130 assert name 131 from django.contrib.auth.hashers import make_password
634 user = FakeUser() 635 user.password = "" 636 self.assertFalse(user.check_password(PASS1))
732 deprecated = ctx.handler(scheme).deprecated 733 assert not deprecated or scheme != ctx.default_scheme() 734 try:
737 raise self.skipTest("backend not available")
738 assert handler_derived_from(handler, testcase.handler)
739 if handler.is_disabled:
745 if not patched and not check_django_hasher_has_backend(handler.django_name):
746 assert scheme in ["django_bcrypt", "django_bcrypt_sha256", "django_argon2"], \
747 "%r scheme should always have active backend" % scheme
748 log.warning("skipping scheme %r due to missing django dependency", scheme)
144 return 145 assert context is adapter.context 146 schemes = [
67 except exc.MissingBackendError: 68 assert scheme in conditionally_available_hashes 69 raise
217 odd_hash = '_Z/..TgFg0/ptQtpAgws' 218 secret = 'test' 219
923 handler = hash.mssql2000 924 secret_case_insensitive = "verify-only" 925 # FIXME: fix UT framework - this hash is sensitive to password case, but verify() is not
212 self.checksum = self._stub_checksum 213 assert self.checksum 214 return self.to_string()
260 else: 261 assert self.backend == "argon2pure" 262 # should parse and verify
281 else: 282 assert self.backend == "argon2pure" 283 # should parse, but fail to verify
364 def check_padding(hash):
365 assert hash.startswith(("$2a$", "$2b$")) and len(hash) >= 28, \
366 "unexpectedly malformed hash: %r" % (hash,)
367 self.assertTrue(hash[28] in '.Oeu',
650 salt = "nyKYxTAvjmy6lMDYMl11Uu"
651 secret = "test"
652 temp_digest = compile_hmac("sha256", salt.encode("ascii"))(secret.encode("ascii"))
87 from django.contrib.auth.hashers import check_password 88 assert self.known_correct_hashes 89 for secret, hash in self.iter_known_hashes():
210 default = handler.default_salt_size 211 assert handler.min_salt_size == 0 212 lower = 1
393 self.checksum = self._stub_checksum 394 assert self.checksum 395 return handler._wrap_hash(self.to_string())
52 # sanity check that we're above 44 ensures minimum requirements (44 - 37 int = 7 frac) 53 assert sys.float_info.radix == 2, "unexpected float_info.radix" 54 assert sys.float_info.mant_dig >= 44, "double precision unexpectedly small"
53 assert sys.float_info.radix == 2, "unexpected float_info.radix" 54 assert sys.float_info.mant_dig >= 44, "double precision unexpectedly small" 55
955 time = 141230981 956 token = '781501' 957 otp = TOTP.using(now=lambda: time + 24 * 3600)(KEY3)
965 time = 141230981 966 token = '781501' 967 otp = TOTP.using(now=lambda: time + 24 * 3600)(KEY3)
973 time = 141230981 974 token = '781501' 975 otp = TOTP.using(now=lambda: time + 24 * 3600)(KEY3)
981 time = 141230981 982 token = '781501' 983 otp = TOTP.using(now=lambda: time + 24 * 3600)(KEY3)
158 from passlib.utils import genseed 159 rng = random.Random(genseed()) 160 a = rng.randint(0, 10**10)
161 162 rng = random.Random(genseed()) 163 b = rng.randint(0, 10**10)
202 def get_hash(secret): 203 assert isinstance(secret, unicode) 204 hash = hasher.hash(secret)
206 hash = hash.decode("utf-8")
207 assert isinstance(hash, unicode)
208 return hash
120 raise ValueError("invalid hash")
121 return hashlib.sha1(b"xyz" + secret).hexdigest()
122
634 registry.register_crypt_handler(self.dummy) 635 assert registry.get_crypt_handler(self.name) is self.dummy 636 return self.dummy
801 data = b"boblious" + secret 802 return str_to_uascii(hashlib.sha1(data).hexdigest()) 803
831 data = self.salt.encode("ascii") + secret + self.salt.encode("ascii")
832 return str_to_uascii(hashlib.sha1(data).hexdigest())
833
317 def prf(key, msg): 318 return hashlib.md5(key+msg+b'fooey').digest() 319 self.assertRaises(NotImplementedError, pbkdf2, b'secret', b'salt', 1000, 20, prf)
289 def wrapper(*args, **kwds): 290 rng = random.Random(master_seed) 291 for _ in irange(count):
486 if desc is None: 487 assert wlist is not None 488 return self._AssertWarningList(self, desc=wlist, msg=msg)
489 # TODO: make this display better diff of *which* warnings did not match 490 assert desc is not None 491 if not isinstance(desc, (list,tuple)):
626 # create rng 627 value = cache[name] = random.Random(seed) 628 return value
1028 name = handler.name 1029 assert hasattr(handler, "backends"), "handler must support uh.HasManyBackends protocol" 1030 assert backend in handler.backends, "unknown backend: %r" % (backend,)
1029 assert hasattr(handler, "backends"), "handler must support uh.HasManyBackends protocol" 1030 assert backend in handler.backends, "unknown backend: %r" % (backend,) 1031 bases = (cls,)
1181 """ 1182 wrong_secret = 'stub' 1183 for secret in self.stock_passwords:
1367 handler = self.handler 1368 assert has_salt_info(handler), "need explicit bit-size for " + handler.name 1369 from math import log
2218 #-------------------------------------------------- 2219 assert without_error or with_error 2220 for cand_hasher in [without_error, with_error]:
2754 continue 2755 assert result is True or result is False 2756 if not result:
2934 # encoding when testing bytes 2935 password_encoding = "utf-8" 2936
3208 def setUp(self):
3209 assert self.backend == "os_crypt"
3210 if not self.handler.has_backend("os_crypt"):
3255 hash = alt_handler.genhash(secret, hash) 3256 assert isinstance(hash, str) 3257 return hash
3268 """ 3269 assert backend == "os_crypt" 3270 reason = super(OsCryptMixin, cls)._get_skip_backend_reason(backend)
3297 # let 'test' string through so _load_os_crypt_backend() will still work 3298 if secret == "test": 3299 return mock_crypt.__wrapped__(secret, config)
3458 handler = self.handler 3459 password = 'stub' 3460 hash = handler.hash(password, user=self.default_user)
273 encrypt_cost = int(encrypt_cost) 274 assert encrypt_cost >= 0 275 self.encrypt_cost = encrypt_cost
752 if now is not None: 753 assert isinstance(now(), num_types) and now() >= 0, \ 754 "now() function must return non-negative int/float" 755 subcls.now = staticmethod(now)
1112 # generate digest 1113 assert isinstance(counter, int_types), "counter must be integer" 1114 assert counter >= 0, "counter must be non-negative"
1113 assert isinstance(counter, int_types), "counter must be integer" 1114 assert counter >= 0, "counter must be non-negative" 1115 keyed_hmac = self._keyed_hmac
1119 digest_size = keyed_hmac.digest_info.digest_size 1120 assert len(digest) == digest_size, "digest_size: sanity check failed" 1121
1122 # derive 31-bit token value 1123 assert digest_size >= 20, "digest_size: sanity check 2 failed" # otherwise 0xF+4 will run off end of hash. 1124 offset = byte_elem_value(digest[-1]) & 0xF
1131 digits = self.digits
1132 assert 0 < digits < 11, "digits: sanity check failed"
1133 return (u("%0*d") % (digits, value))[-digits:]
1249 counter = self._find_match(token, start, end) 1250 assert counter >= last_counter, "sanity check failed: counter went backward" 1251
1448 """ 1449 assert label, "from_uri() failed to provide label" 1450 if not secret:
1553 param_str = u("&").join(u("%s=%s") % (key, quote(value, '')) for key, value in params)
1554 assert param_str, "param_str should never be empty"
1555
1645 # go ahead and mark as changed (needs re-saving) if the version is too old
1646 assert cls._check_otp_type(type)
1647 ver = kwds.pop("v", None)
1658 # XXX: wallet is known at this point, could decrypt key here.
1659 assert 'key' not in kwds # shouldn't be present w/ enckey
1660 kwds.update(key=kwds.pop("enckey"), format="encrypted")
1900 """ 1901 assert entropy > 0 1902 assert len(charset) > 1
1901 assert entropy > 0 1902 assert len(charset) > 1 1903 count = int(math.ceil(entropy * math.log(2, len(charset))))
265 idx = len(bases) - end_idx 266 assert bases[idx-1] == base 267 break
495 # check for chars mapping stage should have removed 496 assert not in_table_b1(c), "failed to strip B.1 in mapping stage" 497 assert not in_table_c12(c), "failed to replace C.1.2 in mapping stage"
496 assert not in_table_b1(c), "failed to strip B.1 in mapping stage" 497 assert not in_table_c12(c), "failed to replace C.1.2 in mapping stage" 498
656 else: 657 assert index == end 658
670 # validate that result was cut on character boundary
671 assert text.startswith(result.decode("utf-8"))
672 return True
673 674 assert sanity_check() 675
728 """ 729 assert encoding 730 if isinstance(source, bytes):
757 """ 758 assert encoding 759 if isinstance(source, unicode):
817 """ 818 assert none in [True, False, None] 819 if isinstance(value, unicode_or_bytes_types):
898 crypt_accepts_bytes = False 899 except: # no pragma 900 # don't care about other errors this might throw, 901 # just want to see if we get past initial type-coercion step. 902 pass 903
925 # otherwise when crypt() does it's encoding, it'll hash the wrong bytes!
926 assert secret.encode("utf-8") == orig, \
927 "utf-8 spec says this can't happen!"
928 if _NULL in secret:
1009 # so just enforcing "unicode_or_str" limitation 1010 assert isinstance(hash, unicode_or_str), \ 1011 "hash must be unicode_or_str, got %s" % type(hash) 1012 assert hash, "hash must be non-empty"
1011 "hash must be unicode_or_str, got %s" % type(hash) 1012 assert hash, "hash must be non-empty" 1013 return safe_crypt(secret, hash) == hash
1084 # XXX: could reseed on every call 1085 rng = random.Random(genseed()) 1086
127 else: 128 assert isinstance(source, bytes) and len(source) == 255 129 target = list(iter_byte_chars(source))
132 k = ord(k) 133 assert isinstance(k, int) and 0 <= k < 256 134 if isinstance(v, unicode):
135 v = v.encode("ascii")
136 assert isinstance(v, bytes) and len(v) == 1
137 target[k] = v
431 else: 432 assert tail == 2 433 # note: 2 msb of last byte are padding
470 else: 471 assert tail == 2 472 # note: 2 lsb of last byte are padding
631 last = cm[cm.index(last) & mask] 632 assert last in padset, "failed to generate valid padding char" 633 else:
636 last = self._encode64(self._decode64(last) & mask) 637 assert last in padset, "failed to generate valid padding char" 638 if PY3:
790 """ 791 assert value >= 0, "caller did not sanitize input" 792 pad = -bits % 6
101 def u(s): 102 assert isinstance(s, str) 103 return s
111 def u(s):
112 assert isinstance(s, str)
113 return s.decode("unicode_escape")
137 def uascii_to_str(s): 138 assert isinstance(s, unicode) 139 return s
141 def bascii_to_str(s):
142 assert isinstance(s, bytes)
143 return s.decode("ascii")
145 def str_to_uascii(s): 146 assert isinstance(s, str) 147 return s
149 def str_to_bascii(s):
150 assert isinstance(s, str)
151 return s.encode("ascii")
155 def byte_elem_value(elem): 156 assert isinstance(elem, int) 157 return elem
159 def iter_byte_values(s): 160 assert isinstance(s, bytes) 161 return s
163 def iter_byte_chars(s): 164 assert isinstance(s, bytes) 165 # FIXME: there has to be a better way to do this
169 def uascii_to_str(s):
170 assert isinstance(s, unicode)
171 return s.encode("ascii")
173 def bascii_to_str(s): 174 assert isinstance(s, bytes) 175 return s
177 def str_to_uascii(s):
178 assert isinstance(s, str)
179 return s.decode("ascii")
181 def str_to_bascii(s): 182 assert isinstance(s, str) 183 return s
192 def iter_byte_values(s): 193 assert isinstance(s, bytes) 194 return (ord(c) for c in s)
196 def iter_byte_chars(s): 197 assert isinstance(s, bytes) 198 return s
156 hash = to_unicode(hash, "ascii", "hash") 157 assert isinstance(prefix, unicode) 158 if not hash.startswith(prefix):
161 # parse 2-part hash or 1-part config string 162 assert isinstance(sep, unicode) 163 parts = hash[len(prefix):].split(sep)
195 hash = to_unicode(hash, "ascii", "hash") 196 assert isinstance(prefix, unicode) 197 if not hash.startswith(prefix):
200 # parse 3-part hash or 2-part config string 201 assert isinstance(sep, unicode) 202 parts = hash[len(prefix):].split(sep)
309 else: 310 assert rounds_base == 10 311 rounds = unicode(rounds)
357 """ 358 assert default is not None, "%s lacks default %s" % (handler.name, param) 359 assert norm(default) == default, "%s: invalid default %s: %r" % (handler.name, param, default)
358 assert default is not None, "%s lacks default %s" % (handler.name, param) 359 assert norm(default) == default, "%s: invalid default %s: %r" % (handler.name, param, default) 360 return True
468 """ 469 assert cls.truncate_size is not None, "truncate_size must be set by subclass" 470 if cls.truncate_error and len(secret) > cls.truncate_size:
829 self = cls.from_string(hash) 830 assert isinstance(self, cls) 831 return self._calc_needs_update(secret=secret, **kwds)
984 cls = self.__class__ 985 assert cls.__module__ != __name__ 986 wrapper_cls = cls.__cc_compat_hack
1155 ident = self.default_ident 1156 assert validate_default_value(self, ident, self._norm_ident, param="default_ident") 1157 else:
1166 # handle bytes 1167 assert ident is not None 1168 if isinstance(ident, bytes):
1413 salt = self._generate_salt() 1414 assert self._norm_salt(salt) == salt, "generated invalid salt: %r" % (salt,) 1415 else:
1528 def _generate_salt(cls): 1529 assert cls.salt_chars in [None, ALL_BYTE_VALUES] 1530 return getrandbytes(rng, cls.default_salt_size)
1760 # but then users patching cls.vary_rounds / cls.default_rounds would get wrong value. 1761 assert default_rounds 1762 vary_rounds = cls.vary_rounds
1767 if isinstance(vary_rounds, float): 1768 assert 0 <= vary_rounds <= 1 # TODO: deprecate vary_rounds==1 1769 if cls.rounds_cost == "log2":
1784 # calculate bounds based on default_rounds +/- vary_rounds 1785 assert vary_rounds >= 0 and isinstance(vary_rounds, int_types) 1786 lower = linear_to_native(default_rounds - vary_rounds, False)
1798 rounds = self._generate_rounds() 1799 assert self._norm_rounds(rounds) == rounds, "generated invalid rounds: %r" % (rounds,) 1800 else:
1856 lower, upper = cls._calc_vary_rounds_range(rounds) 1857 assert lower <= rounds <= upper 1858 if lower < upper:
1947 if parallelism is None: 1948 assert validate_default_value(self, self.parallelism, self._norm_parallelism, 1949 param="parallelism") 1950 else:
2088 cls.set_backend() 2089 assert cls.__backend, "set_backend() failed to load a default backend" 2090 return cls.__backend
2314 # foul things up if this isn't the owner) 2315 assert cls is cls._get_backend_owner(), "_finalize_backend() not invoked on owner" 2316
2318 mixin_map = cls._backend_mixin_map 2319 assert mixin_map, "_backend_mixin_map not specified" 2320 mixin_cls = mixin_map[name]
2320 mixin_cls = mixin_map[name] 2321 assert issubclass(mixin_cls, SubclassBackendMixin), "invalid mixin class" 2322
2332 def _get_backend_loader(cls, name): 2333 assert cls._backend_mixin_map, "_backend_mixin_map not specified" 2334 return cls._backend_mixin_map[name]._load_backend_mixin
2421 # make sure 1.6 api isn't defined at same time 2422 assert not hasattr(cls, "_has_backend_" + name), ( 2423 "%s: can't specify both ._load_backend_%s() " 2424 "and ._has_backend_%s" % (cls.name, name, name) 2425 ) 2426 return loader
2449 backend = cls._pending_backend 2450 assert backend, "should only be called during set_backend()" 2451 if not callable(func):
2662 subcls = self.wrapped.using(**kwds) 2663 assert subcls is not self.wrapped 2664 # then create identical wrapper which wraps the new subclass.
114 if isinstance(pattern, (str, bytes)):
115 assert include is None, (
116 f"include:{include!r} must be null when pattern:{pattern!r} is a string."
117 )
118 regex, include = self.pattern_to_regex(pattern)
131 # null-operation.
132 assert include is None, (
133 f"include:{include!r} must be null when pattern:{pattern!r} is null."
134 )
135
39 import shlex 40 import subprocess 41 import sys
26 import re 27 import subprocess 28 import time
46 47 output = subprocess.Popen( 48 cmd, stdout=out_location, stderr=err_location, env=newenv 49 ) 50 out = output.communicate()
265 version_tags.add(semver) 266 except Exception: 267 pass 268
44 import re 45 import subprocess 46 import textwrap
103 self.kwargs['env'] = env 104 proc = subprocess.Popen(*self.args, **self.kwargs) 105 out, err = proc.communicate()
217 ) 218 assert retcode == 0, 'gpg key generation failed!' 219
60 except IndexError: 61 assert False, 'source dist not found' 62
70 stdout, _, return_code = self.run_setup('egg_info')
71 assert 'test_hook_1\ntest_hook_2' in stdout
72 assert return_code == 0
71 assert 'test_hook_1\ntest_hook_2' in stdout 72 assert return_code == 0
19 import re 20 import subprocess 21 import sys
106 107 p = subprocess.Popen( 108 popen_cmd, 109 stdout=subprocess.PIPE, 110 stderr=subprocess.PIPE, 111 cwd=self.temp_dir, 112 env=env, 113 ) 114 self.addCleanup(p.kill)
47 import stat 48 import subprocess 49 import sys
94 print('Running %s' % ' '.join(args))
95 p = subprocess.Popen(
96 args,
97 stdin=subprocess.PIPE,
98 stdout=subprocess.PIPE,
99 stderr=subprocess.PIPE,
100 cwd=cwd,
101 env=env,
102 )
103 streams = tuple(s.decode('latin1').strip() for s in p.communicate())
42 spec = PathFinder.find_spec(fullname, [PIP_SOURCES_ROOT], target) 43 assert spec, (PIP_SOURCES_ROOT, fullname) 44 return spec
48
49 assert __name__ == "__main__", "Cannot run __pip-runner__.py as a non-main module"
50 runpy.run_module("pip", run_name="__main__", alter_sys=True)
212 prefix = self._prefixes[prefix_as_string] 213 assert not prefix.setup 214 prefix.setup = True
39 super().__init__() 40 assert not cache_dir or os.path.isabs(cache_dir) 41 self.cache_dir = cache_dir or None
125 parts = self._get_cache_path_parts(link) 126 assert self.cache_dir 127 # Store wheels within the root cache_dir
88 # are present. 89 assert not hasattr(options, "no_index") 90
180 status = run_func(*args) 181 assert isinstance(status, int) 182 return status
14 def main_context(self) -> Generator[None, None, None]: 15 assert not self._in_main_context 16
24 def enter_context(self, context_provider: ContextManager[_T]) -> _T: 25 assert self._in_main_context 26
4 import os 5 import subprocess 6 import sys
100 try: 101 proc = subprocess.run(pip_cmd) 102 returncode = proc.returncode
50 if option.takes_value(): 51 assert option.dest is not None 52 metavar = option.metavar or option.dest.lower()
111 if self.parser is not None: 112 assert isinstance(self.parser, ConfigOptionParser) 113 self.parser._update_defaults(self.parser.defaults)
113 self.parser._update_defaults(self.parser.defaults) 114 assert option.dest is not None 115 default_values = self.parser.defaults.get(option.dest)
167 168 assert self.name 169 super().__init__(*args, **kwargs)
224 225 assert option.dest is not None 226
251 elif option.action == "callback": 252 assert option.callback is not None 253 late_eval.add(option.dest)
284 for option in self._get_all_options(): 285 assert option.dest is not None 286 default = defaults.get(option.dest)
27 ) -> Generator[bytes, None, None]: 28 assert bar_type == "on", "This should only be used in the default mode." 29
101 # then https://github.com/python/mypy/issues/7696 kicks in 102 assert self._session is not None 103 return self._session
112 cache_dir = options.cache_dir 113 assert not cache_dir or os.path.isabs(cache_dir) 114
173 # Make sure the index_group options are present. 174 assert hasattr(options, "no_index") 175
242 ) -> Optional[int]: 243 assert self.tempdir_registry is not None 244 if options.no_clean:
288 temp_build_dir_path = temp_build_dir.path 289 assert temp_build_dir_path is not None 290 legacy_resolver = False
43 def _write(self, status: str) -> None: 44 assert not self._finished 45 # Erase what we wrote before by backspacing to the beginning, writing
82 def _update(self, status: str) -> None: 83 assert not self._finished 84 self._rate_limiter.reset()
114 ) 115 print(BASE_COMPLETION.format(script=script, shell=options.shell)) 116 return SUCCESS
2 import os 3 import subprocess 4 from optparse import Values
238 try:
239 subprocess.check_call(f'{editor} "{fname}"', shell=True)
240 except FileNotFoundError as e:
65 # Try to find version in debundled module info. 66 assert module.__file__ is not None 67 env = get_environment([os.path.dirname(module.__file__)])
136 if req.satisfied_by is None: 137 assert req.name is not None 138 preparer.save_linked_requirement(req)
479 item = f"{item}-{installed_dist.version}"
480 except Exception:
481 pass
482 items.append(item)
508 if options.target_dir: 509 assert target_temp_dir 510 self._handle_target_dir(
597 else: 598 assert resolver_variant == "2020-resolver" 599 parts.append(
698 # If we are here, user installs have not been explicitly requested/avoided 699 assert use_user_site is None 700
83 raise CommandError(message) 84 assert isinstance(hits, list) 85 return hits
167 for req in build_successes: 168 assert req.link and req.link.is_wheel 169 assert req.local_file_path
168 assert req.link and req.link.is_wheel 169 assert req.local_file_path 170 # copy from cache to target directory
129 """Returns the file with highest priority in configuration""" 130 assert self.load_only is not None, "Need to be specified a file to be editing" 131
159 160 assert self.load_only 161 fname, parser = self._get_parser_to_modify()
179 180 assert self.load_only 181 if key not in self._config[self.load_only]:
362 # Determine which parser to modify 363 assert self.load_only 364 parsers = self._parsers[self.load_only]
13 def get_metadata_distribution(self) -> BaseDistribution: 14 assert self.req.satisfied_by is not None, "not actually installed" 15 return self.req.satisfied_by
52 pyproject_requires = self.req.pyproject_requires 53 assert pyproject_requires is not None 54 conflicting, missing = self.req.build_env.check_requirements(
66 pyproject_requires = self.req.pyproject_requires 67 assert pyproject_requires is not None 68
92 backend = self.req.pep517_backend 93 assert backend is not None 94 with backend.subprocess_runner(runner):
102 backend = self.req.pep517_backend 103 assert backend is not None 104 with backend.subprocess_runner(runner):
22 """ 23 assert self.req.local_file_path, "Set as part of preparation during download" 24 assert self.req.name, "Wheels are never unnamed"
23 assert self.req.local_file_path, "Set as part of preparation during download" 24 assert self.req.name, "Wheels are never unnamed" 25 wheel = FilesystemWheel(self.req.local_file_path)
86 if reference is None: 87 assert hasattr(self, "reference"), "error reference not provided!" 88 reference = self.reference
88 reference = self.reference 89 assert _is_kebab_case(reference), "error reference must be kebab-case!" 90
650 else:
651 assert self.error is not None
652 message_part = f".\n{self.error}\n"
192 def __init__(self, page: "IndexContent") -> None: 193 assert page.cache_link_parsing 194 self.page = page
363 """ 364 assert set(applicable_candidates) <= set(candidates) 365
366 if best_candidate is None: 367 assert not applicable_candidates 368 else:
368 else: 369 assert best_candidate in applicable_candidates 370
542 match = re.match(r"^(\d+)(.*)$", wheel.build_tag) 543 assert match is not None, "guaranteed by filename validation" 544 build_tag_groups = match.groups()
846 for candidate in file_candidates: 847 assert candidate.link.url # we need to have a URL 848 try:
66 obj = d.get_command_obj("install", create=True)
67 assert obj is not None
68 i = cast(distutils_install_command, obj)
71 # ideally, we'd prefer a scheme class that has no side-effects.
72 assert not (user and prefix), f"user={user} prefix={prefix}"
73 assert not (home and prefix), f"home={home} prefix={prefix}"
72 assert not (user and prefix), f"user={user} prefix={prefix}"
73 assert not (home and prefix), f"home={home} prefix={prefix}"
74 i.user = user or i.user
87 else:
88 assert dist_dir.endswith(".dist-info")
89 dist_cls = pkg_resources.DistInfoDistribution
59 ) 60 assert infos[0] is not None 61 return infos[0]
183 and self.info.vcs == "git" 184 and user_pass == "git" 185 ):
14 def _install_req_to_dict(cls, ireq: InstallRequirement) -> Dict[str, Any]:
15 assert ireq.download_info, f"No download_info for {ireq}"
16 res = {
69 def __post_init__(self) -> None: 70 assert self.name in _SUPPORTED_HASHES 71
105 if self.hashes is not None: 106 assert all(name in _SUPPORTED_HASHES for name in self.hashes) 107
394 name = urllib.parse.unquote(name)
395 assert name, f"URL {self._url!r} produced no filename"
396 return name
8 import shutil 9 import subprocess 10 import sysconfig
135 env["PYTHONIOENCODING"] = "utf-8" 136 res = subprocess.run( 137 cmd, 138 stdin=subprocess.DEVNULL, 139 stdout=subprocess.PIPE, 140 env=env, 141 ) 142 if res.returncode:
151 env["PYTHONIOENCODING"] = "utf-8"
152 subprocess.run(
153 [self.keyring, "set", service_name, username],
154 input=f"{password}{os.linesep}".encode("utf-8"),
155 env=env,
156 check=True,
157 )
158 return None
425
426 assert (
427 # Credentials were found
428 (username is not None and password is not None)
429 # Credentials were not found
430 or (username is None and password is None)
431 ), f"Could not load credentials from url: {original_url}"
432
547 """Response callback to save credentials on success.""" 548 assert ( 549 self.keyring_provider.has_keyring 550 ), "should never reach here without keyring" 551
37 def __init__(self, directory: str) -> None: 38 assert directory is not None, "Cache directory must not be None." 39 super().__init__()
135 except NetworkConnectionError as e: 136 assert e.response is not None 137 logger.critical(
169 except NetworkConnectionError as e: 170 assert e.response is not None 171 logger.critical(
53 raise_for_status(head) 54 assert head.status_code == 200 55 self._session, self._url, self._chunk_size = session, url, chunk_size
13 import shutil 14 import subprocess 15 import sys
181 try: 182 rustc_output = subprocess.check_output( 183 ["rustc", "--version"], stderr=subprocess.STDOUT, timeout=0.5 184 ) 185 except Exception:
181 try: 182 rustc_output = subprocess.check_output( 183 ["rustc", "--version"], stderr=subprocess.STDOUT, timeout=0.5 184 ) 185 except Exception:
184 ) 185 except Exception: 186 pass 187 else:
38 ) -> Tuple["_Marshallable", ...]: 39 assert isinstance(host, str) 40 parts = (self._scheme, host, handler, None, None, None)
53 except NetworkConnectionError as exc: 54 assert exc.response 55 logger.critical(
36 else: 37 assert isinstance(original_value, str) # for mypy 38 target[name] = original_value
78 79 assert req.link 80 # Get the file to write information about this requirement.
94 # If we're here, req should really not be building already. 95 assert req not in self._entries 96
106 107 assert req.link 108 # Delete the created file and the corresponding entries.
21 """ 22 assert metadata_directory is not None 23 try:
21 """ 22 assert metadata_directory is not None 23 try:
159 editable_project_location = dist.editable_project_location 160 assert editable_project_location 161 location = os.path.normcase(os.path.abspath(editable_project_location))
98 # XXX RECORD hashes will need to be updated 99 assert os.path.isfile(path) 100
619 pyc_path = pyc_output_path(path) 620 assert os.path.exists(pyc_path) 621 pyc_record_path = cast(
76 vcs_backend = vcs.get_backend_for_scheme(link.scheme) 77 assert vcs_backend is not None 78 vcs_backend.unpack(location, url=hide_url(link.url), verbosity=verbosity)
157 158 assert not link.is_existing_dir() 159
307 return 308 assert req.source_dir is None 309 if req.link.is_existing_dir():
395 return None 396 assert req.req is not None 397 logger.info(
471 for req in partially_downloaded_reqs: 472 assert req.link 473 links_to_fully_download[req.link] = req
505 """Prepare a requirement to be obtained from req.link.""" 506 assert req.link 507 self._log_preparing_link(req)
572 ) -> BaseDistribution: 573 assert req.link 574 link = req.link
578 if hashes and req.is_wheel_from_cache: 579 assert req.download_info is not None 580 assert link.is_wheel
579 assert req.download_info is not None 580 assert link.is_wheel 581 assert link.is_file
580 assert link.is_wheel 581 assert link.is_file 582 # We need to verify hashes, and we have found the requirement in the cache
631 # prepare_editable_requirement). 632 assert not req.editable 633 req.download_info = direct_url_from_link(link, req.source_dir)
662 def save_linked_requirement(self, req: InstallRequirement) -> None: 663 assert self.download_dir is not None 664 assert req.link is not None
663 assert self.download_dir is not None 664 assert req.link is not None 665 link = req.link
692 """Prepare an editable requirement.""" 693 assert req.editable, "cannot prepare a non-editable req as editable" 694
705 req.update_editable() 706 assert req.source_dir 707 req.download_info = direct_url_for_editable(req.unpacked_source_directory)
726 """Prepare an already-installed requirement.""" 727 assert req.satisfied_by, "req should have been satisfied but isn't" 728 assert skip_reason is not None, (
727 assert req.satisfied_by, "req should have been satisfied but isn't"
728 assert skip_reason is not None, (
729 "did not get skip reason skipped but req.satisfied_by "
730 "is set to {}".format(req.satisfied_by)
731 )
732 logger.info(
108 # At this point, we know whether we're going to use PEP 517. 109 assert use_pep517 is not None 110
133 # specified a backend, though. 134 assert build_system is not None 135
32 for req in requirements:
33 assert req.name, f"invalid to-be-installed requirement: {req}"
34 yield req.name, req
178 179 assert line.is_requirement 180
473 new_line.append(line) 474 assert primary_line_number is not None 475 yield primary_line_number, "".join(new_line)
485 if new_line: 486 assert primary_line_number is not None 487 yield primary_line_number, "".join(new_line)
88 ) -> None: 89 assert req is None or isinstance(req, Requirement), req 90 self.req = req
102 if self.editable: 103 assert link 104 if link.is_file:
236 return False 237 assert self.pep517_backend 238 with self.build_env:
327 ) -> str: 328 assert build_dir is not None 329 if self._temp_build_dir is not None:
329 if self._temp_build_dir is not None: 330 assert self._temp_build_dir.path 331 return self._temp_build_dir.path
369 """Set requirement after generating metadata.""" 370 assert self.req is None 371 assert self.metadata is not None
370 assert self.req is None 371 assert self.metadata is not None 372 assert self.source_dir is not None
371 assert self.metadata is not None 372 assert self.source_dir is not None 373
465 def setup_py_path(self) -> str:
466 assert self.source_dir, f"No source dir for {self}"
467 setup_py = os.path.join(self.unpacked_source_directory, "setup.py")
472 def setup_cfg_path(self) -> str:
473 assert self.source_dir, f"No source dir for {self}"
474 setup_cfg = os.path.join(self.unpacked_source_directory, "setup.cfg")
479 def pyproject_toml_path(self) -> str:
480 assert self.source_dir, f"No source dir for {self}"
481 return make_pyproject_path(self.unpacked_source_directory)
545 """
546 assert self.source_dir
547 details = self.name or f"from {self.link}"
549 if self.use_pep517: 550 assert self.pep517_backend is not None 551 if (
603 def assert_source_matches_version(self) -> None: 604 assert self.source_dir 605 version = self.metadata["version"]
650 return 651 assert self.editable 652 assert self.source_dir
651 assert self.editable 652 assert self.source_dir 653 if self.link.scheme == "file":
658 # So here, if it's neither a path nor a valid VCS URL, it's a bug.
659 assert vcs_backend, f"Unsupported VCS URL {self.link.url}"
660 hidden_url = hide_url(self.link.url)
678 """ 679 assert self.req 680 dist = get_default_environment().get_distribution(self.req.name)
691 def _clean_zip_name(name: str, prefix: str) -> str:
692 assert name.startswith(
693 prefix + os.path.sep
694 ), f"name {name!r} doesn't start with prefix {prefix!r}"
695 name = name[len(prefix) + 1 :]
707 """ 708 assert self.source_dir 709 if build_dir is None:
803 804 assert self.is_wheel 805 assert self.local_file_path
804 assert self.is_wheel 805 assert self.local_file_path 806
44 def add_unnamed_requirement(self, install_req: InstallRequirement) -> None: 45 assert not install_req.name 46 self.unnamed_requirements.append(install_req)
48 def add_named_requirement(self, install_req: InstallRequirement) -> None: 49 assert install_req.name 50
69 location = dist.location 70 assert location is not None, "not installed" 71
544 )
545 assert os.path.samefile(
546 normalized_link_pointer, normalized_dist_location
547 ), (
548 f"Egg-link {develop_egg_link} (to {link_pointer}) does not match "
549 f"installed location of {dist.raw_name} (at {dist_location})"
550 )
551 paths_to_remove.add(develop_egg_link)
134 super().__init__() 135 assert upgrade_strategy in self._allowed_strategies 136
239 # This next bit is really a sanity check. 240 assert ( 241 not install_req.user_supplied or parent_req_name is None 242 ), "a user supplied req shouldn't have a parent" 243
318 else: 319 assert self.upgrade_strategy == "only-if-needed" 320 return req.user_supplied or req.constraint
453 # so it must be None here. 454 assert req.satisfied_by is None 455 skip_reason = self._check_skip_installed(req)
542 # provided by the user. 543 assert req_to_install.user_supplied 544 self._add_requirement_to_set(
55 ) -> InstallRequirement: 56 assert not template.editable, "template is editable" 57 if template.req:
80 ) -> InstallRequirement: 81 assert template.editable, "template not editable" 82 ireq = install_req_from_editable(
266 ireq = make_install_req_from_link(link, template) 267 assert ireq.link == link 268 if ireq.link.is_wheel and not ireq.link.is_file:
270 wheel_name = canonicalize_name(wheel.name)
271 assert name == wheel_name, f"{name!r} != {wheel_name!r} for wheel"
272 # Version may not be present for PEP 508 direct URLs
274 wheel_version = Version(wheel.version)
275 assert version == wheel_version, "{!r} != {!r} for wheel {}".format(
276 version, wheel_version, name
277 )
278
279 if cache_entry is not None: 280 assert ireq.link.is_wheel 281 assert ireq.link.is_file
280 assert ireq.link.is_wheel 281 assert ireq.link.is_file 282 if cache_entry.persistent and template.link is template.original_link:
243 template = ireqs[0] 244 assert template.req, "Candidates found on index must be PEP 508" 245 name = canonicalize_name(template.req.name)
248 for ireq in ireqs: 249 assert ireq.req, "Candidates found on index must be PEP 508" 250 specifier &= ireq.req.specifier
343 base_cand = as_base_candidate(lookup_cand) 344 assert base_cand is not None, "no extras here" 345 yield self._make_extras_candidate(base_cand, extras)
483 continue 484 assert ireq.name, "Constraint must be named" 485 name = canonicalize_name(ireq.name)
576 ) -> UnsupportedPythonVersion: 577 assert causes, "Requires-Python error reported with no cause" 578
634 ) -> InstallationError: 635 assert e.causes, "Installation error reported with no cause" 636
43 def __init__(self, ireq: InstallRequirement) -> None: 44 assert ireq.link is None, "This is a link, not a specifier" 45 self._ireq = ireq
58 def project_name(self) -> NormalizedName: 59 assert self._ireq.req, "Specifier-backed ireq is always PEP 508" 60 return canonicalize_name(self._ireq.req.name)
82 def is_satisfied_by(self, candidate: Candidate) -> bool:
83 assert candidate.name == self.name, (
84 f"Internal issue: Candidate is not for this requirement "
85 f"{candidate.name} vs {self.name}"
86 )
87 # We can safely always allow prereleases here since PackageFinder
89 # prerelease candidates if the user does not expect them. 90 assert self._ireq.req, "Specifier-backed ireq is always PEP 508" 91 spec = self._ireq.req.specifier
127 def is_satisfied_by(self, candidate: Candidate) -> bool: 128 assert candidate.name == self._candidate.name, "Not Python candidate" 129 # We can safely always allow prereleases here since PackageFinder
52 super().__init__() 53 assert upgrade_strategy in self._allowed_strategies 54
181 """ 182 assert self._result is not None, "must call resolve() first" 183
282 difference = set(weights.keys()).difference(requirement_keys) 283 assert not difference, difference 284
22 else: 23 assert isinstance(direct_url.info, DirInfo) 24 requirement += direct_url.url
43 vcs_backend = vcs.get_backend_for_scheme(link.scheme) 44 assert vcs_backend 45 url, requested_revision, _ = vcs_backend.get_url_rev_and_auth(
54 # with the VCS checkout. 55 assert requested_revision 56 commit_id = requested_revision
60 # which we can inspect to find out the commit id. 61 assert source_dir 62 commit_id = vcs_backend.get_revision(source_dir)
30 result = ENCODING_RE.search(line)
31 assert result is not None
32 encoding = result.groups()[0].decode("ascii")
21 22 assert os.path.isabs(path) 23
99 for _ in range(10): 100 name = basename + "".join(random.choice(alphabet) for _ in range(6)) 101 file = os.path.join(path, name)
156 # If we are given a diagnostic error to present, present it with indentation. 157 assert isinstance(record.args, tuple) 158 if record.msg == "[present-rich] %s" and len(record.args) == 1:
159 rich_renderable = record.args[0]
160 assert isinstance(
161 rich_renderable, (ConsoleRenderable, RichCast, str)
162 ), f"{rich_renderable} is not rich-console-renderable"
163
473 user = "****" 474 password = "" 475 else:
476 user = urllib.parse.quote(user)
477 password = ":****"
478 return "{user}{password}@{netloc}".format(
112 ) -> List[str]: 113 assert not (use_user_site and prefix) 114
3 import shlex 4 import subprocess 5 from typing import (
140 try: 141 proc = subprocess.Popen( 142 # Convert HiddenText objects to the underlying str. 143 reveal_command_args(cmd), 144 stdin=subprocess.PIPE, 145 stdout=subprocess.PIPE, 146 stderr=subprocess.STDOUT if not stdout_only else subprocess.PIPE, 147 cwd=cwd, 148 env=env, 149 errors="backslashreplace", 150 ) 151 except Exception as exc:
160 if not stdout_only: 161 assert proc.stdout 162 assert proc.stdin
161 assert proc.stdout 162 assert proc.stdin 163 proc.stdin.close()
175 if use_spinner: 176 assert spinner 177 spinner.spin()
198 if use_spinner: 199 assert spinner 200 if proc_had_error:
132 if globally_managed: 133 assert _tempdir_manager is not None 134 _tempdir_manager.enter_context(self)
137 def path(self) -> str:
138 assert not self._deleted, f"Attempted to access deleted path: {self._path}"
139 return self._path
215 ensure_dir(os.path.dirname(path)) 216 assert fp is not None 217 with open(path, "wb") as destfp:
29 """
30 assert url.startswith(
31 "file:"
32 ), f"You can only turn file: urls into filenames (not {url!r})"
33
213 # rev return value is always non-None. 214 assert rev is not None 215
476 if "://" not in url:
477 assert "file:" not in url
478 url = url.replace("git+", "git+ssh://")
64 if base == location: 65 assert dirurl is not None 66 base = dirurl + "/" # save the root url
168 match = _svn_info_xml_url_re.search(xml) 169 assert match is not None 170 url = match.group(1)
104 # unless it points to an immutable commit hash. 105 assert not req.editable 106 assert req.source_dir
105 assert not req.editable 106 assert req.source_dir 107 vcs_backend = vcs.get_backend_for_scheme(req.link.scheme)
107 vcs_backend = vcs.get_backend_for_scheme(req.link.scheme) 108 assert vcs_backend 109 if vcs_backend.is_immutable_rev_checkout(req.link.url, req.source_dir):
112 113 assert req.link 114 base, ext = req.link.splitext()
129 cache_available = bool(wheel_cache.cache_dir) 130 assert req.link 131 if cache_available and _should_cache(req):
213 with TempDirectory(kind="wheel") as temp_dir: 214 assert req.name 215 if req.use_pep517:
215 if req.use_pep517: 216 assert req.metadata_directory 217 assert req.pep517_backend
216 assert req.metadata_directory 217 assert req.pep517_backend 218 if global_options:
317 for req in requirements: 318 assert req.name 319 cache_dir = _get_cache_dir(req, wheel_cache)
337 req.local_file_path = req.link.file_path 338 assert req.link.is_wheel 339 build_successes.append(req)
11 try: 12 import cPickle as pickle 13 except ImportError:
13 except ImportError: 14 import pickle 15
157 def _loads_v2(self, request, data, body_file=None): 158 assert body_file is None 159 try:
58 def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState: 59 assert self.coding_sm is not None 60 assert self.distribution_analyzer is not None
59 assert self.coding_sm is not None 60 assert self.distribution_analyzer is not None 61
97 def get_confidence(self) -> float: 98 assert self.distribution_analyzer is not None 99
272 def charset_name(self) -> str: 273 assert self._logical_prober is not None 274 assert self._visual_prober is not None
273 assert self._logical_prober is not None 274 assert self._visual_prober is not None 275
307 def state(self) -> ProbingState: 308 assert self._logical_prober is not None 309 assert self._visual_prober is not None
308 assert self._logical_prober is not None 309 assert self._visual_prober is not None 310
57 def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState: 58 assert self.coding_sm is not None 59 assert self.distribution_analyzer is not None
58 assert self.coding_sm is not None 59 assert self.distribution_analyzer is not None 60
93 def get_confidence(self) -> float: 94 assert self.distribution_analyzer is not None 95 return self.distribution_analyzer.get_confidence()
58 def feed(self, byte_str: Union[bytes, bytearray]) -> ProbingState: 59 assert self.coding_sm is not None 60 assert self.distribution_analyzer is not None
59 assert self.coding_sm is not None 60 assert self.distribution_analyzer is not None 61
100 def get_confidence(self) -> float: 101 assert self.distribution_analyzer is not None 102
318 charset_name = max_prober.charset_name 319 assert charset_name is not None 320 lower_charset_name = charset_name.lower()
41 import httplib 42 import xmlrpclib 43 import Queue as queue
618 def cache_from_source(path, debug_override=None):
619 assert path.endswith('.py')
620 if debug_override is None:
1023 f.close() 1024 return hashlib.md5(content).hexdigest() 1025
10 import shutil 11 import subprocess 12 import tempfile
56 try: 57 rc = subprocess.check_call([s, '--version'], stdout=sink, 58 stderr=sink) 59 if rc == 0:
191 stderr = [] 192 p = subprocess.Popen(cmd, **kwargs) 193 # We don't use communicate() here because we may need to
267 file_data = f.read() 268 md5_digest = hashlib.md5(file_data).hexdigest() 269 sha256_digest = hashlib.sha256(file_data).hexdigest()
949 super(DistPathLocator, self).__init__(**kwargs) 950 assert isinstance(distpath, DistributionPath) 951 self.distpath = distpath
112 parent, _ = os.path.split(d)
113 assert parent not in ('', '/')
114 add_dir(dirs, parent)
338 if _PYTHON_VERSION > (3, 2): 339 assert pattern_re.startswith(start) and pattern_re.endswith(end) 340 else:
350 prefix_re = self._glob_to_re(prefix) 351 assert prefix_re.startswith(start) and prefix_re.endswith(end) 352 prefix_re = prefix_re[len(start): len(prefix_re) - len(end)]
71 else: 72 assert isinstance(expr, dict) 73 op = expr['op']
937 def _from_legacy(self):
938 assert self._legacy and not self._data
939 result = {
1000 1001 assert self._data and not self._legacy 1002 result = LegacyMetadata()
284 os.remove(dfname) 285 except Exception: 286 pass # still in use - ignore error 287 else:
20 ssl = None 21 import subprocess 22 import sys
276 path = path.replace(os.path.sep, '/')
277 assert path.startswith(root)
278 return path[len(root):].lstrip('/')
362 entry = get_export_entry(s) 363 assert entry is not None 364 entries[k] = entry
389 entry = get_export_entry(s) 390 assert entry is not None 391 #entry.dist = self
536 def copy_stream(self, instream, outfile, encoding=None): 537 assert not os.path.isdir(outfile) 538 self.ensure_dir(os.path.dirname(outfile))
596 else: 597 assert path.startswith(prefix) 598 diagpath = path[len(prefix):]
643 """ 644 assert self.record 645 result = self.files_written, self.dirs_created
660 if flist: 661 assert flist == ['__pycache__'] 662 sd = os.path.join(d, flist[0])
836 break 837 assert i is not None 838 return result
1088 def add(self, pred, succ): 1089 assert pred != succ 1090 self._preds.setdefault(succ, set()).add(pred)
1093 def remove(self, pred, succ): 1094 assert pred != succ 1095 try:
1251 tarinfo.name = tarinfo.name.decode('utf-8')
1252 archive.extractall(dest_dir)
1253
1282 def __init__(self, minval=0, maxval=100): 1283 assert maxval is None or maxval >= minval 1284 self.min = self.cur = minval
1290 def update(self, curval): 1291 assert self.min <= curval 1292 assert self.max is None or curval <= self.max
1291 assert self.min <= curval 1292 assert self.max is None or curval <= self.max 1293 self.cur = curval
1300 def increment(self, incr): 1301 assert incr >= 0 1302 self.update(self.cur + incr)
1390 if len(rich_path_glob) > 1: 1391 assert len(rich_path_glob) == 3, rich_path_glob 1392 prefix, set, suffix = rich_path_glob
1711 def run_command(self, cmd, **kwargs): 1712 p = subprocess.Popen(cmd, stdout=subprocess.PIPE, 1713 stderr=subprocess.PIPE, **kwargs) 1714 t1 = threading.Thread(target=self.reader, args=(p.stdout, 'stdout'))
33 self._parts = parts = self.parse(s) 34 assert isinstance(parts, tuple) 35 assert len(parts) > 0
34 assert isinstance(parts, tuple) 35 assert len(parts) > 0 36
427 break 428 assert distinfo, '.dist-info directory expected, not found' 429
36 import shlex 37 import subprocess 38 import sys
639 def __get__(self, obj: Any, owner: Type[Any]) -> Any:
640 assert obj is not None, f"call {self._fname} on an instance"
641 ret = obj.__dict__[self._fname] = self._f(obj)
1160 cmd = ("lsb_release", "-a")
1161 stdout = subprocess.check_output(cmd, stderr=subprocess.DEVNULL)
1162 # Command not found or lsb_release returned error
1197 cmd = ("uname", "-rs")
1198 stdout = subprocess.check_output(cmd, stderr=subprocess.DEVNULL)
1199 except OSError:
1208 try:
1209 stdout = subprocess.check_output("oslevel", stderr=subprocess.DEVNULL)
1210 except (OSError, subprocess.CalledProcessError):
1208 try:
1209 stdout = subprocess.check_output("oslevel", stderr=subprocess.DEVNULL)
1210 except (OSError, subprocess.CalledProcessError):
368 def feed(self, next_bytes): 369 assert self._feeding 370 view = _get_data_from_buffer(next_bytes)
432 break 433 assert isinstance(read_data, bytes) 434 self._buffer += read_data
616 return self._ext_hook(n, bytes(obj)) 617 assert typ == TYPE_IMMEDIATE 618 return obj
832 data = obj.data 833 assert isinstance(code, int) 834 assert isinstance(data, bytes)
833 assert isinstance(code, int) 834 assert isinstance(data, bytes) 835 L = len(data)
145 version_string = os.confstr("CS_GNU_LIBC_VERSION")
146 assert version_string is not None
147 _, version = version_string.split()
12 import struct 13 import subprocess 14 import sys
105 return None 106 proc = subprocess.run([ld], stderr=subprocess.PIPE, universal_newlines=True) 107 return _parse_musl_version(proc.stderr)
129 plat = sysconfig.get_platform()
130 assert plat.startswith("linux-"), "not linux"
131
151 152 assert isinstance(marker, (list, tuple, str)) 153
225 for marker in markers: 226 assert isinstance(marker, (list, tuple, str)) 227
241 else: 242 assert marker in ["and", "or"] 243 if marker == "or":
1560 code = compile(source, script_filename, 'exec') 1561 exec(code, namespace, namespace) 1562 else:
1571 script_code = compile(script_text, script_filename, 'exec') 1572 exec(script_code, namespace, namespace) 1573
102 """:return: cache directory shared by users, e.g. ``/var/tmp/$appname/$version``"""
103 return self._append_app_name_and_version("/var/tmp") # noqa: S108
104
163 if not Path(path).exists():
164 path = f"/tmp/runtime-{getuid()}" # noqa: S108
165 else:
521 width = shutil.get_terminal_size().columns - 2 522 except Exception: 523 pass 524 argparse.HelpFormatter.__init__(self, prog, indent_increment,
102 with open(filename, 'rb') as f: 103 exec(f.read(), custom_namespace) 104 # Retrieve the class `formattername` from that namespace
17 18 import subprocess 19
84 def _get_nix_font_path(self, name, style): 85 proc = subprocess.Popen(['fc-list', "%s:style=%s" % (name, style), 'file'], 86 stdout=subprocess.PIPE, stderr=None) 87 stdout, _ = proc.communicate()
84 def _get_nix_font_path(self, name, style): 85 proc = subprocess.Popen(['fc-list', "%s:style=%s" % (name, style), 'file'], 86 stdout=subprocess.PIPE, stderr=None) 87 stdout, _ = proc.communicate()
491 """Preprocess the token component of a token definition.""" 492 assert type(token) is _TokenType or callable(token), \ 493 'token type must be simple type or callable, not %r' % (token,) 494 return token
508 else: 509 assert False, 'unknown new state %r' % new_state 510 elif isinstance(new_state, combined):
515 for istate in new_state: 516 assert istate != new_state, 'circular state ref %r' % istate 517 itokens.extend(cls._process_state(unprocessed,
523 for istate in new_state:
524 assert (istate in unprocessed or
525 istate in ('#pop', '#push')), \
526 'unknown new state ' + istate
527 return new_state
528 else: 529 assert False, 'unknown new state def %r' % new_state 530
532 """Preprocess a single state definition.""" 533 assert type(state) is str, "wrong state name %r" % state 534 assert state[0] != '#', "invalid state name %r" % state
533 assert type(state) is str, "wrong state name %r" % state 534 assert state[0] != '#', "invalid state name %r" % state 535 if state in processed:
541 # it's a state reference 542 assert tdef != state, "circular state reference %r" % state 543 tokens.extend(cls._process_state(unprocessed, processed,
555 556 assert type(tdef) is tuple, "wrong rule def %r" % tdef 557
722 else: 723 assert False, "wrong state def: %r" % new_state 724 statetokens = tokendefs[statestack[-1]]
810 else: 811 assert False, "wrong state def: %r" % new_state 812 statetokens = tokendefs[ctx.stack[-1]]
152 with open(filename, 'rb') as f: 153 exec(f.read(), custom_namespace) 154 # Retrieve the class `lexername` from that namespace
78 return text 79 assert False, "wrong color format %r" % text 80
7 from os.path import join as pjoin 8 from subprocess import STDOUT, check_call, check_output 9
58 59 check_call(cmd, cwd=cwd, env=env) 60
70 71 check_output(cmd, cwd=cwd, env=env, stderr=STDOUT) 72
56 urllib3_version = urllib3_version.split(".")
57 assert urllib3_version != ["dev"] # Verify urllib3 isn't installed from git.
58
66 # urllib3 >= 1.21.1 67 assert major >= 1 68 if major == 1:
68 if major == 1: 69 assert minor >= 21 70
75 # chardet_version >= 3.0.2, < 6.0.0 76 assert (3, 0, 2) <= (major, minor, patch) < (6, 0, 0) 77 elif charset_normalizer_version:
80 # charset_normalizer >= 2.0.0 < 4.0.0 81 assert (2, 0, 0) <= (major, minor, patch) < (4, 0, 0) 82 else:
44 """ 45 assert isinstance(u_string, str) 46 try:
147 x = x.encode("utf-8")
148 return hashlib.md5(x).hexdigest()
149
155 x = x.encode("utf-8")
156 return hashlib.sha1(x).hexdigest()
157
204 205 cnonce = hashlib.sha1(s).hexdigest()[:16] 206 if _algorithm == "MD5-SESS":
12 """ 13 assert values, "1 or more values required" 14 for value in values:
128 total_ratio = sum(ratios) 129 assert total_ratio > 0, "Sum of ratios must be > 0" 130
435 436 assert fore is not None 437 assert back is not None
436 assert fore is not None 437 assert back is not None 438
565 """ 566 assert len(title) < 255, "Console title must be less than 255 characters" 567 SetConsoleTitle(title)
364 if self.type == ColorType.TRUECOLOR: 365 assert self.triplet is not None 366 return self.triplet
367 elif self.type == ColorType.EIGHT_BIT: 368 assert self.number is not None 369 return EIGHT_BIT_PALETTE[self.number]
370 elif self.type == ColorType.STANDARD: 371 assert self.number is not None 372 return theme.ansi_colors[self.number]
373 elif self.type == ColorType.WINDOWS: 374 assert self.number is not None 375 return WINDOWS_PALETTE[self.number]
376 else: # self.type == ColorType.DEFAULT: 377 assert self.number is None 378 return theme.foreground_color if foreground else theme.background_color
492 number = self.number 493 assert number is not None 494 fore, back = (30, 40) if number < 8 else (82, 92)
498 number = self.number 499 assert number is not None 500 fore, back = (30, 40) if number < 8 else (82, 92)
503 elif _type == ColorType.EIGHT_BIT:
504 assert self.number is not None
505 return ("38" if foreground else "48", "5", str(self.number))
507 else: # self.standard == ColorStandard.TRUECOLOR: 508 assert self.triplet is not None 509 red, green, blue = self.triplet
519 if system == ColorSystem.EIGHT_BIT and self.system == ColorSystem.TRUECOLOR: 520 assert self.triplet is not None 521 _h, l, s = rgb_to_hls(*self.triplet.normalized)
545 if self.system == ColorSystem.TRUECOLOR: 546 assert self.triplet is not None 547 triplet = self.triplet
548 else: # self.system == ColorSystem.EIGHT_BIT 549 assert self.number is not None 550 triplet = ColorTriplet(*EIGHT_BIT_PALETTE[self.number])
556 if self.system == ColorSystem.TRUECOLOR: 557 assert self.triplet is not None 558 triplet = self.triplet
559 else: # self.system == ColorSystem.EIGHT_BIT 560 assert self.number is not None 561 if self.number < 16:
572 """Parse six hex characters in to RGB triplet.""" 573 assert len(hex_color) == 6, "must be 6 characters" 574 color = ColorTriplet(
1134 1135 assert count >= 0, "count must be >= 0" 1136 self.print(NewLine(count))
1899 offset -= 1 1900 assert frame is not None 1901 return frame.f_code.co_filename, frame.f_lineno, frame.f_locals
2137 """ 2138 assert ( 2139 self.record 2140 ), "To export console contents set record=True in the constructor or instance" 2141
2193 """ 2194 assert ( 2195 self.record 2196 ), "To export console contents set record=True in the constructor or instance" 2197 fragments: List[str] = []
64 ) -> None: 65 assert refresh_per_second > 0, "refresh_per_second must be > 0" 66 self._renderable = renderable
351 time.sleep(0.4) 352 if random.randint(0, 10) < 1: 353 console.log(next(examples))
354 exchange_rate_dict[(select_exchange, exchange)] = 200 / ( 355 (random.random() * 320) + 1 356 )
135 exc_type, exc_value, exc_traceback = record.exc_info 136 assert exc_type is not None 137 assert exc_value is not None
136 assert exc_type is not None 137 assert exc_value is not None 138 traceback = Traceback.from_exception(
190 console = console or get_console() 191 assert console is not None 192
195 if value is not None: 196 assert console is not None 197 builtins._ = None # type: ignore[attr-defined]
495 ) 496 assert self.node is not None 497 return self.node.check_length(start_length, max_length)
501 node = self.node 502 assert node is not None 503 whitespace = self.whitespace
503 whitespace = self.whitespace 504 assert node.children 505 if node.key_repr:
640 rich_repr_result = obj.__rich_repr__() 641 except Exception: 642 pass 643
1079 ) -> None: 1080 assert refresh_per_second > 0, "refresh_per_second must be > 0" 1081 self._lock = RLock()
1700 time.sleep(0.01) 1701 if random.randint(0, 100) < 1: 1702 progress.log(next(examples))
213 """ 214 assert self.choices is not None 215 return value.strip() in self.choices
192 self._link_id = (
193 f"{randint(0, 999999)}{hash(self._meta)}" if (link or meta) else ""
194 )
242 style._meta = dumps(meta)
243 style._link_id = f"{randint(0, 999999)}{hash(style._meta)}"
244 style._hash = None
474 """Get meta information (can not be changed after construction)."""
475 return {} if self._meta is None else cast(Dict[str, Any], loads(self._meta))
476
489 style._link = self._link
490 style._link_id = f"{randint(0, 999999)}" if self._link else ""
491 style._null = False
641 style._link = self._link
642 style._link_id = f"{randint(0, 999999)}" if self._link else ""
643 style._hash = self._hash
687 style._link = link
688 style._link_id = f"{randint(0, 999999)}" if link else ""
689 style._hash = None
481 """Split tokens to one per line.""" 482 assert lexer # required to make MyPy happy - we know lexer is not None at this point 483
786 tab_size = self.tab_size 787 assert tab_size is not None 788 result = self.blank_copy()
855 """ 856 assert len(character) == 1, "Character must be a string of length 1" 857 if count:
872 """ 873 assert len(character) == 1, "Character must be a string of length 1" 874 if count:
888 """ 889 assert len(character) == 1, "Character must be a string of length 1" 890 if count:
1023 """ 1024 assert separator, "separator must not be empty" 1025
281 if not isinstance(suppress_entity, str):
282 assert (
283 suppress_entity.__file__ is not None
284 ), f"{suppress_entity!r} must be a module with '__file__' attribute"
285 path = os.path.dirname(suppress_entity.__file__)
644 if excluded: 645 assert exclude_frames is not None 646 yield Text(
734 _locs_ = _globs_
735 exec("""exec _code_ in _globs_, _locs_""")
736
71 def __call__(self, retry_state: "RetryCallState") -> float: 72 return self.wait_random_min + (random.random() * (self.wait_random_max - self.wait_random_min)) 73
193 high = super().__call__(retry_state=retry_state) 194 return random.uniform(0, high) 195
221 def __call__(self, retry_state: "RetryCallState") -> float: 222 jitter = random.uniform(0, self.jitter) 223 try:
373 deduped_pairs.remove(pair) 374 assert not deduped_pairs, deduped_pairs 375 parameters = tuple(new_parameters)
1299 def copy_with(self, params): 1300 assert len(params) == 1 1301 new_type = params[0]
2612 def __new__(cls, typename, bases, ns): 2613 assert _NamedTuple in bases 2614 for base in bases:
2653 def _namedtuple_mro_entries(bases): 2654 assert NamedTuple in bases 2655 return (_NamedTuple,)
718 leaf = Security.SecTrustGetCertificateAtIndex(trust, 0) 719 assert leaf 720
722 certdata = Security.SecCertificateCopyData(leaf) 723 assert certdata 724
900 # See PEP 543 for the real deal. 901 assert not server_side 902 assert do_handshake_on_connect
901 assert not server_side 902 assert do_handshake_on_connect 903 assert suppress_ragged_eofs
902 assert do_handshake_on_connect 903 assert suppress_ragged_eofs 904
22 reading = "r" in mode or not writing 23 assert reading or writing 24 binary = "b" in mode
44 else: 45 assert writing 46 buffer = io.BufferedWriter(raw, buffering)
149 sys.excepthook(*sys.exc_info()) 150 assert f not in cls._registry 151 finally:
786 _locs_ = _globs_
787 exec ("""exec _code_ in _globs_, _locs_""")
788
494 """ 495 assert self._fp 496 c_int_max = 2 ** 31 - 1
140 has_ipv6 = True 141 except Exception: 142 pass 143
178 } 179 return wrap_socket(socket, ciphers=self.ciphers, **kwargs) 180
119 reading = "r" in mode or not writing 120 assert reading or writing 121 binary = "b" in mode
141 else: 142 assert writing 143 buffer = io.BufferedWriter(raw, buffering)
223 if output: 224 assert decoder.encoding is not None 225 yield decoder.encoding
230 output = decode(b'', final=True) 231 assert decoder.encoding is not None 232 yield decoder.encoding
20 def assert_lower(string): 21 assert string == string.lower() 22 return string
46 repr(encoding['name']).lstrip('u'))
47 for category in json.loads(urlopen(url).read().decode('ascii'))
48 for encoding in category['encodings']
29 def test_labels():
30 assert lookup('utf-8').name == 'utf-8'
31 assert lookup('Utf-8').name == 'utf-8'
30 assert lookup('utf-8').name == 'utf-8'
31 assert lookup('Utf-8').name == 'utf-8'
32 assert lookup('UTF-8').name == 'utf-8'
31 assert lookup('Utf-8').name == 'utf-8'
32 assert lookup('UTF-8').name == 'utf-8'
33 assert lookup('utf8').name == 'utf-8'
32 assert lookup('UTF-8').name == 'utf-8'
33 assert lookup('utf8').name == 'utf-8'
34 assert lookup('utf8').name == 'utf-8'
33 assert lookup('utf8').name == 'utf-8'
34 assert lookup('utf8').name == 'utf-8'
35 assert lookup('utf8 ').name == 'utf-8'
34 assert lookup('utf8').name == 'utf-8'
35 assert lookup('utf8 ').name == 'utf-8'
36 assert lookup(' \r\nutf8\t').name == 'utf-8'
35 assert lookup('utf8 ').name == 'utf-8'
36 assert lookup(' \r\nutf8\t').name == 'utf-8'
37 assert lookup('u8') is None # Python label.
36 assert lookup(' \r\nutf8\t').name == 'utf-8'
37 assert lookup('u8') is None # Python label.
38 assert lookup('utf-8 ') is None # Non-ASCII white space.
37 assert lookup('u8') is None # Python label.
38 assert lookup('utf-8 ') is None # Non-ASCII white space.
39
39
40 assert lookup('US-ASCII').name == 'windows-1252'
41 assert lookup('iso-8859-1').name == 'windows-1252'
40 assert lookup('US-ASCII').name == 'windows-1252'
41 assert lookup('iso-8859-1').name == 'windows-1252'
42 assert lookup('latin1').name == 'windows-1252'
41 assert lookup('iso-8859-1').name == 'windows-1252'
42 assert lookup('latin1').name == 'windows-1252'
43 assert lookup('LATIN1').name == 'windows-1252'
42 assert lookup('latin1').name == 'windows-1252'
43 assert lookup('LATIN1').name == 'windows-1252'
44 assert lookup('latin-1') is None
43 assert lookup('LATIN1').name == 'windows-1252'
44 assert lookup('latin-1') is None
45 assert lookup('LATİN1') is None # ASCII-only case insensitivity.
44 assert lookup('latin-1') is None
45 assert lookup('LATİN1') is None # ASCII-only case insensitivity.
46
49 for label in LABELS:
50 assert decode(b'', label) == ('', lookup(label))
51 assert encode('', label) == b''
50 assert decode(b'', label) == ('', lookup(label))
51 assert encode('', label) == b''
52 for repeat in [0, 1, 12]:
53 output, _ = iter_decode([b''] * repeat, label) 54 assert list(output) == [] 55 assert list(iter_encode([''] * repeat, label)) == []
54 assert list(output) == [] 55 assert list(iter_encode([''] * repeat, label)) == [] 56 decoder = IncrementalDecoder(label)
56 decoder = IncrementalDecoder(label) 57 assert decoder.decode(b'') == '' 58 assert decoder.decode(b'', final=True) == ''
57 assert decoder.decode(b'') == '' 58 assert decoder.decode(b'', final=True) == '' 59 encoder = IncrementalEncoder(label)
59 encoder = IncrementalEncoder(label)
60 assert encoder.encode('') == b''
61 assert encoder.encode('', final=True) == b''
60 assert encoder.encode('') == b''
61 assert encoder.encode('', final=True) == b''
62 # All encoding names are valid labels too:
63 for name in set(LABELS.values()): 64 assert lookup(name).name == name 65
76 def test_decode():
77 assert decode(b'\x80', 'latin1') == ('€', lookup('latin1'))
78 assert decode(b'\x80', lookup('latin1')) == ('€', lookup('latin1'))
77 assert decode(b'\x80', 'latin1') == ('€', lookup('latin1'))
78 assert decode(b'\x80', lookup('latin1')) == ('€', lookup('latin1'))
79 assert decode(b'\xc3\xa9', 'utf8') == ('é', lookup('utf8'))
78 assert decode(b'\x80', lookup('latin1')) == ('€', lookup('latin1'))
79 assert decode(b'\xc3\xa9', 'utf8') == ('é', lookup('utf8'))
80 assert decode(b'\xc3\xa9', UTF8) == ('é', lookup('utf8'))
79 assert decode(b'\xc3\xa9', 'utf8') == ('é', lookup('utf8'))
80 assert decode(b'\xc3\xa9', UTF8) == ('é', lookup('utf8'))
81 assert decode(b'\xc3\xa9', 'ascii') == ('é', lookup('ascii'))
80 assert decode(b'\xc3\xa9', UTF8) == ('é', lookup('utf8'))
81 assert decode(b'\xc3\xa9', 'ascii') == ('é', lookup('ascii'))
82 assert decode(b'\xEF\xBB\xBF\xc3\xa9', 'ascii') == ('é', lookup('utf8')) # UTF-8 with BOM
81 assert decode(b'\xc3\xa9', 'ascii') == ('é', lookup('ascii'))
82 assert decode(b'\xEF\xBB\xBF\xc3\xa9', 'ascii') == ('é', lookup('utf8')) # UTF-8 with BOM
83
83
84 assert decode(b'\xFE\xFF\x00\xe9', 'ascii') == ('é', lookup('utf-16be')) # UTF-16-BE with BOM
85 assert decode(b'\xFF\xFE\xe9\x00', 'ascii') == ('é', lookup('utf-16le')) # UTF-16-LE with BOM
84 assert decode(b'\xFE\xFF\x00\xe9', 'ascii') == ('é', lookup('utf-16be')) # UTF-16-BE with BOM
85 assert decode(b'\xFF\xFE\xe9\x00', 'ascii') == ('é', lookup('utf-16le')) # UTF-16-LE with BOM
86 assert decode(b'\xFE\xFF\xe9\x00', 'ascii') == ('\ue900', lookup('utf-16be'))
85 assert decode(b'\xFF\xFE\xe9\x00', 'ascii') == ('é', lookup('utf-16le')) # UTF-16-LE with BOM
86 assert decode(b'\xFE\xFF\xe9\x00', 'ascii') == ('\ue900', lookup('utf-16be'))
87 assert decode(b'\xFF\xFE\x00\xe9', 'ascii') == ('\ue900', lookup('utf-16le'))
86 assert decode(b'\xFE\xFF\xe9\x00', 'ascii') == ('\ue900', lookup('utf-16be'))
87 assert decode(b'\xFF\xFE\x00\xe9', 'ascii') == ('\ue900', lookup('utf-16le'))
88
88
89 assert decode(b'\x00\xe9', 'UTF-16BE') == ('é', lookup('utf-16be'))
90 assert decode(b'\xe9\x00', 'UTF-16LE') == ('é', lookup('utf-16le'))
89 assert decode(b'\x00\xe9', 'UTF-16BE') == ('é', lookup('utf-16be'))
90 assert decode(b'\xe9\x00', 'UTF-16LE') == ('é', lookup('utf-16le'))
91 assert decode(b'\xe9\x00', 'UTF-16') == ('é', lookup('utf-16le'))
90 assert decode(b'\xe9\x00', 'UTF-16LE') == ('é', lookup('utf-16le'))
91 assert decode(b'\xe9\x00', 'UTF-16') == ('é', lookup('utf-16le'))
92
92
93 assert decode(b'\xe9\x00', 'UTF-16BE') == ('\ue900', lookup('utf-16be'))
94 assert decode(b'\x00\xe9', 'UTF-16LE') == ('\ue900', lookup('utf-16le'))
93 assert decode(b'\xe9\x00', 'UTF-16BE') == ('\ue900', lookup('utf-16be'))
94 assert decode(b'\x00\xe9', 'UTF-16LE') == ('\ue900', lookup('utf-16le'))
95 assert decode(b'\x00\xe9', 'UTF-16') == ('\ue900', lookup('utf-16le'))
94 assert decode(b'\x00\xe9', 'UTF-16LE') == ('\ue900', lookup('utf-16le'))
95 assert decode(b'\x00\xe9', 'UTF-16') == ('\ue900', lookup('utf-16le'))
96
98 def test_encode():
99 assert encode('é', 'latin1') == b'\xe9'
100 assert encode('é', 'utf8') == b'\xc3\xa9'
99 assert encode('é', 'latin1') == b'\xe9'
100 assert encode('é', 'utf8') == b'\xc3\xa9'
101 assert encode('é', 'utf8') == b'\xc3\xa9'
100 assert encode('é', 'utf8') == b'\xc3\xa9'
101 assert encode('é', 'utf8') == b'\xc3\xa9'
102 assert encode('é', 'utf-16') == b'\xe9\x00'
101 assert encode('é', 'utf8') == b'\xc3\xa9'
102 assert encode('é', 'utf-16') == b'\xe9\x00'
103 assert encode('é', 'utf-16le') == b'\xe9\x00'
102 assert encode('é', 'utf-16') == b'\xe9\x00'
103 assert encode('é', 'utf-16le') == b'\xe9\x00'
104 assert encode('é', 'utf-16be') == b'\x00\xe9'
103 assert encode('é', 'utf-16le') == b'\xe9\x00'
104 assert encode('é', 'utf-16be') == b'\x00\xe9'
105
110 return ''.join(output) 111 assert iter_decode_to_string([], 'latin1') == '' 112 assert iter_decode_to_string([b''], 'latin1') == ''
111 assert iter_decode_to_string([], 'latin1') == '' 112 assert iter_decode_to_string([b''], 'latin1') == '' 113 assert iter_decode_to_string([b'\xe9'], 'latin1') == 'é'
112 assert iter_decode_to_string([b''], 'latin1') == '' 113 assert iter_decode_to_string([b'\xe9'], 'latin1') == 'é' 114 assert iter_decode_to_string([b'hello'], 'latin1') == 'hello'
113 assert iter_decode_to_string([b'\xe9'], 'latin1') == 'é' 114 assert iter_decode_to_string([b'hello'], 'latin1') == 'hello' 115 assert iter_decode_to_string([b'he', b'llo'], 'latin1') == 'hello'
114 assert iter_decode_to_string([b'hello'], 'latin1') == 'hello' 115 assert iter_decode_to_string([b'he', b'llo'], 'latin1') == 'hello' 116 assert iter_decode_to_string([b'hell', b'o'], 'latin1') == 'hello'
115 assert iter_decode_to_string([b'he', b'llo'], 'latin1') == 'hello' 116 assert iter_decode_to_string([b'hell', b'o'], 'latin1') == 'hello' 117 assert iter_decode_to_string([b'\xc3\xa9'], 'latin1') == 'é'
116 assert iter_decode_to_string([b'hell', b'o'], 'latin1') == 'hello' 117 assert iter_decode_to_string([b'\xc3\xa9'], 'latin1') == 'é' 118 assert iter_decode_to_string([b'\xEF\xBB\xBF\xc3\xa9'], 'latin1') == 'é'
117 assert iter_decode_to_string([b'\xc3\xa9'], 'latin1') == 'é' 118 assert iter_decode_to_string([b'\xEF\xBB\xBF\xc3\xa9'], 'latin1') == 'é' 119 assert iter_decode_to_string([
118 assert iter_decode_to_string([b'\xEF\xBB\xBF\xc3\xa9'], 'latin1') == 'é' 119 assert iter_decode_to_string([ 120 b'\xEF\xBB\xBF', b'\xc3', b'\xa9'], 'latin1') == 'é' 121 assert iter_decode_to_string([
120 b'\xEF\xBB\xBF', b'\xc3', b'\xa9'], 'latin1') == 'é' 121 assert iter_decode_to_string([ 122 b'\xEF\xBB\xBF', b'a', b'\xc3'], 'latin1') == 'a\uFFFD' 123 assert iter_decode_to_string([
122 b'\xEF\xBB\xBF', b'a', b'\xc3'], 'latin1') == 'a\uFFFD' 123 assert iter_decode_to_string([ 124 b'', b'\xEF', b'', b'', b'\xBB\xBF\xc3', b'\xa9'], 'latin1') == 'é' 125 assert iter_decode_to_string([b'\xEF\xBB\xBF'], 'latin1') == ''
124 b'', b'\xEF', b'', b'', b'\xBB\xBF\xc3', b'\xa9'], 'latin1') == 'é' 125 assert iter_decode_to_string([b'\xEF\xBB\xBF'], 'latin1') == '' 126 assert iter_decode_to_string([b'\xEF\xBB'], 'latin1') == 'ï»'
125 assert iter_decode_to_string([b'\xEF\xBB\xBF'], 'latin1') == '' 126 assert iter_decode_to_string([b'\xEF\xBB'], 'latin1') == 'ï»' 127 assert iter_decode_to_string([b'\xFE\xFF\x00\xe9'], 'latin1') == 'é'
126 assert iter_decode_to_string([b'\xEF\xBB'], 'latin1') == 'ï»' 127 assert iter_decode_to_string([b'\xFE\xFF\x00\xe9'], 'latin1') == 'é' 128 assert iter_decode_to_string([b'\xFF\xFE\xe9\x00'], 'latin1') == 'é'
127 assert iter_decode_to_string([b'\xFE\xFF\x00\xe9'], 'latin1') == 'é' 128 assert iter_decode_to_string([b'\xFF\xFE\xe9\x00'], 'latin1') == 'é' 129 assert iter_decode_to_string([
128 assert iter_decode_to_string([b'\xFF\xFE\xe9\x00'], 'latin1') == 'é' 129 assert iter_decode_to_string([ 130 b'', b'\xFF', b'', b'', b'\xFE\xe9', b'\x00'], 'latin1') == 'é' 131 assert iter_decode_to_string([
130 b'', b'\xFF', b'', b'', b'\xFE\xe9', b'\x00'], 'latin1') == 'é' 131 assert iter_decode_to_string([ 132 b'', b'h\xe9', b'llo'], 'x-user-defined') == 'h\uF7E9llo' 133
135 def test_iter_encode(): 136 assert b''.join(iter_encode([], 'latin1')) == b'' 137 assert b''.join(iter_encode([''], 'latin1')) == b''
136 assert b''.join(iter_encode([], 'latin1')) == b'' 137 assert b''.join(iter_encode([''], 'latin1')) == b'' 138 assert b''.join(iter_encode(['é'], 'latin1')) == b'\xe9'
137 assert b''.join(iter_encode([''], 'latin1')) == b'' 138 assert b''.join(iter_encode(['é'], 'latin1')) == b'\xe9' 139 assert b''.join(iter_encode(['', 'é', '', ''], 'latin1')) == b'\xe9'
138 assert b''.join(iter_encode(['é'], 'latin1')) == b'\xe9' 139 assert b''.join(iter_encode(['', 'é', '', ''], 'latin1')) == b'\xe9' 140 assert b''.join(iter_encode(['', 'é', '', ''], 'utf-16')) == b'\xe9\x00'
139 assert b''.join(iter_encode(['', 'é', '', ''], 'latin1')) == b'\xe9' 140 assert b''.join(iter_encode(['', 'é', '', ''], 'utf-16')) == b'\xe9\x00' 141 assert b''.join(iter_encode(['', 'é', '', ''], 'utf-16le')) == b'\xe9\x00'
140 assert b''.join(iter_encode(['', 'é', '', ''], 'utf-16')) == b'\xe9\x00' 141 assert b''.join(iter_encode(['', 'é', '', ''], 'utf-16le')) == b'\xe9\x00' 142 assert b''.join(iter_encode(['', 'é', '', ''], 'utf-16be')) == b'\x00\xe9'
141 assert b''.join(iter_encode(['', 'é', '', ''], 'utf-16le')) == b'\xe9\x00' 142 assert b''.join(iter_encode(['', 'é', '', ''], 'utf-16be')) == b'\x00\xe9' 143 assert b''.join(iter_encode([
142 assert b''.join(iter_encode(['', 'é', '', ''], 'utf-16be')) == b'\x00\xe9' 143 assert b''.join(iter_encode([ 144 '', 'h\uF7E9', '', 'llo'], 'x-user-defined')) == b'h\xe9llo' 145
151 decoded = 'aa'
152 assert decode(encoded, 'x-user-defined') == (decoded, lookup('x-user-defined'))
153 assert encode(decoded, 'x-user-defined') == encoded
152 assert decode(encoded, 'x-user-defined') == (decoded, lookup('x-user-defined'))
153 assert encode(decoded, 'x-user-defined') == encoded
1 import os 2 import subprocess 3 import sys
8 env = {**os.environ, **{"PIP_YES": "true", "PIP_DISABLE_PIP_VERSION_CHECK": "true"}}
9 result = subprocess.check_output(
10 [python_location, "-m", "pip"] + list(args), cwd=cwd, env=env
11 )
12 return result.decode()
70 name = unquote(name)
71 assert name, f"URL {self._url!r} produced no filename"
72 return name
142 def _url_to_path(url):
143 assert url.startswith(
144 "file:"
145 ), f"You can only turn file: urls into filenames (not {url!r})"
146
1 import subprocess 2 3 from pip_api._call import call
145 version_string = os.confstr("CS_GNU_LIBC_VERSION")
146 assert version_string is not None
147 _, version = version_string.split()
12 import struct 13 import subprocess 14 import sys
105 return None 106 proc = subprocess.run([ld], stderr=subprocess.PIPE, universal_newlines=True) 107 return _parse_musl_version(proc.stderr)
129 plat = sysconfig.get_platform()
130 assert plat.startswith("linux-"), "not linux"
131
151 152 assert isinstance(marker, (list, tuple, str)) 153
225 for marker in markers: 226 assert isinstance(marker, (list, tuple, str)) 227
241 else: 242 assert marker in ["and", "or"] 243 if marker == "or":
3227 return super(Word, self).__str__() 3228 except Exception: 3229 pass 3230
3372 return super(Regex, self).__str__() 3373 except Exception: 3374 pass 3375
3551 return super(QuotedString, self).__str__() 3552 except Exception: 3553 pass 3554
3624 return super(CharsNotIn, self).__str__() 3625 except Exception: 3626 pass 3627
3920 return super(ParseExpression, self).__str__() 3921 except Exception: 3922 pass 3923
4508 return super(ParseElementEnhance, self).__str__() 4509 except Exception: 4510 pass 4511
9 import shutil 10 import subprocess 11 import sys
40 try: 41 process = subprocess.run(cmd, check=True, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL) 42 except subprocess.CalledProcessError as cpe: # pragma: no cover
7 import os 8 import subprocess 9 import sys
152 try: 153 subprocess.run( 154 fix_cmd, 155 check=True, 156 stdout=subprocess.DEVNULL, 157 stderr=subprocess.DEVNULL, 158 ) 159 except subprocess.CalledProcessError as cpe:
140 deps[i] = str(req) 141 assert req.marker is None or req.marker.evaluate() 142
6 import os.path 7 import subprocess 8 from collections.abc import Sequence
8 from collections.abc import Sequence 9 from subprocess import Popen 10
47 # more responsive. 48 with Popen(args, bufsize=0, stdout=subprocess.PIPE, stderr=subprocess.PIPE) as process: 49 # NOTE: We use `poll()` to control this loop instead of the `read()` call
14 """
15 assert False, f"unhandled type: {type(x).__name__}"
16
317 try: 318 assert isinstance(parsed, ParsedRequirement) 319 req = build_req_from_parsedreq(parsed)
686 result = ENCODING_RE.search(line)
687 assert result is not None
688 encoding = result.groups()[0].decode("ascii")
1183 else: 1184 assert isinstance(parsed_line, (InvalidRequirementLine, CommentRequirementLine,)) 1185 yield parsed_line
1218 1219 assert parsed_line.is_requirement 1220
1544 new_line.append(line) 1545 assert primary_line_number is not None 1546 yield primary_line_number, "".join(new_line)
1556 if new_line: 1557 assert primary_line_number is not None 1558 yield primary_line_number, "".join(new_line)
1621 """
1622 assert url.startswith(
1623 "file:"
1624 ), f"You can only turn file: urls into filenames (not {url!r})"
1625
1769 name = urllib.parse.unquote(name)
1770 assert name, f"URL {self._url!r} produced no filename"
1771 return name
1969 """ 1970 assert req is None or isinstance(req, Requirement), req 1971 self.req = req
1471 code = compile(source, script_filename, 'exec') 1472 exec(code, namespace, namespace) 1473 else:
1478 script_code = compile(script_text, script_filename, 'exec') 1479 exec(script_code, namespace, namespace) 1480
1 import os 2 import subprocess 3 import contextlib
30 if runner is None: 31 runner = functools.partial(subprocess.check_call, shell=True) 32 # In the tar command, use --strip-components=1 to strip the first path and
86 stdout = devnull if quiet else None 87 subprocess.check_call(cmd, stdout=stdout) 88 yield repo_dir
518 try: 519 return eval(use) 520 except TypeError:
3481 # Largest order statistic: https://en.wikipedia.org/wiki/Order_statistic 3482 W = exp(log(random()) / k) 3483
3485 # number with a geometric distribution. Sample it using random() and logs. 3486 next_index = k + floor(log(random()) / log(1 - W)) 3487
3490 if index == next_index: 3491 reservoir[randrange(k)] = element 3492 # The new W is the largest in a sample of k U(0, `old_W`) numbers
3492 # The new W is the largest in a sample of k U(0, `old_W`) numbers 3493 W *= exp(log(random()) / k) 3494 next_index += floor(log(random()) / log(1 - W)) + 1
3493 W *= exp(log(random()) / k) 3494 next_index += floor(log(random()) / log(1 - W)) + 1 3495
3503 # Log-transform for numerical stability for weights that are small/large 3504 weight_keys = (log(random()) / weight for weight in weights) 3505
3513 smallest_weight_key, _ = reservoir[0] 3514 weights_to_skip = log(random()) / smallest_weight_key 3515
3521 t_w = exp(weight * smallest_weight_key) 3522 r_2 = uniform(t_w, 1) # generate U(t_w, 1) 3523 weight_key = log(r_2) / weight
3525 smallest_weight_key, _ = reservoir[0] 3526 weights_to_skip = log(random()) / smallest_weight_key 3527 else:
504 pools = [tuple(pool) for pool in args] * repeat 505 return tuple(choice(pool) for pool in pools) 506
554 n = len(pool) 555 indices = sorted(randrange(n) for i in range(r)) 556 return tuple(pool[i] for i in indices)
145 version_string = os.confstr("CS_GNU_LIBC_VERSION")
146 assert version_string is not None
147 _, version = version_string.split()
12 import struct 13 import subprocess 14 import sys
105 return None 106 proc = subprocess.run([ld], stderr=subprocess.PIPE, universal_newlines=True) 107 return _parse_musl_version(proc.stderr)
129 plat = sysconfig.get_platform()
130 assert plat.startswith("linux-"), "not linux"
131
151 152 assert isinstance(marker, (list, tuple, str)) 153
225 for marker in markers: 226 assert isinstance(marker, (list, tuple, str)) 227
241 else: 242 assert marker in ["and", "or"] 243 if marker == "or":
181 if not Path(path).exists():
182 path = f"/tmp/runtime-{getuid()}" # noqa: S108
183 else:
413 if specmodule_or_class is not None: 414 assert spec_opts is not None 415 self.set_specification(specmodule_or_class, spec_opts)
505 """ 506 assert not self.is_historic(), ( 507 "Cannot directly call a historic hook - use call_historic instead." 508 ) 509 self._verify_all_args_are_provided(kwargs)
526 """
527 assert self._call_history is not None
528 kwargs = kwargs or {}
546 :ref:`call_extra`.""" 547 assert not self.is_historic(), ( 548 "Cannot directly call a historic hook - use call_historic instead." 549 ) 550 self._verify_all_args_are_provided(kwargs)
577 if self.is_historic(): 578 assert self._call_history is not None 579 for kwargs, result_callback in self._call_history:
582 # XXX: remember firstresult isn't compat with historic 583 assert isinstance(res, list) 584 result_callback(res[0])
208 if name is None: 209 assert plugin is not None, "one of name or plugin needs to be specified" 210 name = self.get_name(plugin)
210 name = self.get_name(plugin) 211 assert name is not None, "plugin is not registered" 212
224 if self._name2plugin.get(name): 225 assert name is not None 226 del self._name2plugin[name]
335 336 assert hook.spec is not None 337 if hook.spec.warn_on_impl:
58 else: 59 assert isinstance(tags, tuple) 60 self._tags2proc[tags] = processor
42 ) 43 from xml.etree.ElementTree import Element, SubElement 44
973 # Will load any class already loaded assuming fully qualified name
974 self._type_ = eval(f'{mapped_array_type}[{results.get("array_of")}]')
975 self._concrete_type = eval(str(results.get('array_of')))
974 self._type_ = eval(f'{mapped_array_type}[{results.get("array_of")}]')
975 self._concrete_type = eval(str(results.get('array_of')))
976 except NameError:
1006 # Will load any class already loaded assuming fully qualified name
1007 self._type_ = eval(f'{mapped_array_type}[{results.get("array_of")}]')
1008 self._concrete_type = eval(str(results.get('array_of')))
1007 self._type_ = eval(f'{mapped_array_type}[{results.get("array_of")}]')
1008 self._concrete_type = eval(str(results.get('array_of')))
1009 except NameError:
23 if TYPE_CHECKING: # pragma: no cover 24 from xml.etree.ElementTree import Element 25
56 __TOKEN_MULTISTRING_SEARCH = re_compile(r' {2,}')
57 __TOKEN_MULTISTRING_REPLACE = ' '
58
753 break 754 assert len(indent) == depth + 1 755 if start[1] not in indent_chances:
1875 else: 1876 assert not kwargs 1877 self._io_error = None
2655 2656 def _parse_multi_options(options, split_token=','): 2657 r"""Split and strip and discard empties. 2658 2659 Turns the following: 2660 2661 A, 2662 B, 2663 2664 into ["A", "B"] 2665 """ 2666 if options: 2667 return [o.strip() for o in options.split(split_token) if o.strip()] 2668 else: 2669 return options 2670
13 import io 14 from subprocess import check_output 15 from .c_parser import CParser
41 # as \n for Python's purpose 42 text = check_output(path_list, universal_newlines=True) 43 except OSError as e:
63 """ 64 assert isinstance(switch_node, c_ast.Switch) 65 if not isinstance(switch_node.stmt, c_ast.Compound):
159 160 assert isinstance(parent, c_ast.TypeDecl) 161 grandparent.type = node.type
365 else: 366 assert name == 'enum' 367 members = None if n.values is None else n.values.enumerators
157 def _pop_scope(self): 158 assert len(self._scope_stack) > 1 159 self._scope_stack.pop()
416 for decl in decls: 417 assert decl['decl'] is not None 418 if is_typedef:
477 """ 478 if token == 'struct': 479 return c_ast.Struct
1037 spec = p[1] 1038 assert 'typedef' not in spec['storage'] 1039
599 try: 600 result = eval(expr) 601 except Exception:
214 else:
215 exec('import %s' % tabfile)
216 lextab = sys.modules[tabfile]
1038 pkgname = '.'.join(parts[:-1])
1039 exec('import %s' % pkgname)
1040 srcfile = getattr(sys.modules[pkgname], '__file__', '')
1524 def set_precedence(self, term, assoc, level): 1525 assert self.Productions == [None], 'Must call set_precedence() before add_production()' 1526 if term in self.Precedence:
1561 try: 1562 c = eval(s) 1563 if (len(c) > 1):
1981 else:
1982 exec('import %s' % module)
1983 parsetab = sys.modules[module]
1999 try: 2000 import cPickle as pickle 2001 except ImportError:
2001 except ImportError: 2002 import pickle 2003
2849 try: 2850 import cPickle as pickle 2851 except ImportError:
2851 except ImportError: 2852 import pickle 2853 with open(filename, 'wb') as outf:
3253 pkgname = '.'.join(parts[:-1])
3254 exec('import %s' % pkgname)
3255 srcfile = getattr(sys.modules[pkgname], '__file__', '')
552 else:
553 assert mode in {'before', 'after', 'plain'}, f"invalid mode: {mode!r}, expected 'before', 'after' or 'plain"
554 if n_positional == 2:
668 else:
669 assert mode == 'wrap', f"invalid mode: {mode!r}, expected 'plain' or 'wrap'"
670 if n_positional == 2:
761 # Make coverage happy as it can only get here in the last possible case 762 assert isinstance(func, cached_property) 763 func = func.func # type: ignore
162 """ 163 assert not self._used 164 schema = self._apply_to_root(schema)
807 if core_config.get('extra_fields_behavior') == 'allow':
808 assert cls.__mro__[0] is cls
809 assert cls.__mro__[-1] is object
808 assert cls.__mro__[0] is cls 809 assert cls.__mro__[-1] is object 810 for candidate_cls in cls.__mro__[:-1]:
1359 schema = self.generate_schema(annotation) 1360 assert schema['type'] != 'definitions' 1361 schema['ref'] = ref # type: ignore
1366 expected = list(get_literal_values(literal_type, type_check=False, unpack_type_aliases='eager'))
1367 assert expected, f'literal "expected" cannot be empty, obj={literal_type}'
1368 schema = core_schema.literal_schema(expected)
1579 1580 assert field.annotation is not None, 'field.annotation should not be None when generating a schema' 1581 update_field_from_config(self._config_wrapper, name, field)
2001 else: 2002 assert p.kind == Parameter.VAR_KEYWORD, p.kind 2003
2068 if parameter_mode is None: 2069 assert p.kind == Parameter.VAR_KEYWORD, p.kind 2070
2418 else: 2419 assert serializer.info.mode == 'plain' 2420 schema['serialization'] = core_schema.plain_serializer_function_ser_schema(
2581 else: 2582 assert validator.info.mode == 'after' 2583 if info_arg:
296 origin_type = getattr(typing, type_._name) 297 assert origin_type is not None 298
4 5 import subprocess 6 from pathlib import Path
16 try: 17 subprocess.check_output(['git', '--help']) 18 return True
16 try: 17 subprocess.check_output(['git', '--help']) 18 return True
26 """Get the SHA-1 of the HEAD of a git repository."""
27 return subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD'], cwd=dir).decode('utf-8').strip()
26 """Get the SHA-1 of the HEAD of a git repository."""
27 return subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD'], cwd=dir).decode('utf-8').strip()
638 639 assert cls.__pydantic_fields_complete__ 640
461 # Thus we assert here for type checking purposes: 462 assert isinstance(value, typing.ForwardRef) 463
359 360 assert isinstance(decimal_tuple.exponent, int) 361
144 """ 145 assert init is False, 'pydantic.dataclasses.dataclass only supports init=False' 146 assert validate_on_init is not False, 'validate_on_init=False is no longer supported'
145 assert init is False, 'pydantic.dataclasses.dataclass only supports init=False' 146 assert validate_on_init is not False, 'validate_on_init=False is no longer supported' 147
121 else: 122 assert p.kind == Parameter.VAR_KEYWORD, p.kind 123 self.v_kwargs_name = name
3 import json 4 import pickle 5 import warnings
53 bb = b if isinstance(b, bytes) else b.encode() # type: ignore 54 return pickle.loads(bb) 55 else:
386 else: 387 assert isinstance(step, _PipelineAnd) 388 s = cs.chain_schema([handler(step.left), handler(step.right)])
503 if s and s['type'] in {'str', 'list', 'tuple', 'set', 'frozenset', 'dict'}:
504 assert (
505 s['type'] == 'str'
506 or s['type'] == 'list'
507 or s['type'] == 'tuple'
508 or s['type'] == 'set'
509 or s['type'] == 'dict'
510 or s['type'] == 'frozenset'
511 )
512 s = s.copy()
548 def check_tz_aware(v: object) -> bool: 549 assert isinstance(v, datetime.datetime) 550 return v.tzinfo is not None
559 def check_tz_naive(v: object) -> bool: 560 assert isinstance(v, datetime.datetime) 561 return v.tzinfo is None
574 s = _apply_constraint(s, annotated_types.Lt(constraint.lt)) 575 assert s is not None 576 elif isinstance(constraint, annotated_types.Predicate):
621 else: 622 assert isinstance(constraint, Pattern) 623 if s and s['type'] == 'str':
628 def check_pattern(v: object) -> bool: 629 assert isinstance(v, str) 630 return constraint.match(v) is not None
2546 2547 assert not isinstance(cls.__pydantic_core_schema__, _mock_val_ser.MockCoreSchema), 'this is a bug! please report it' 2548 return schema_generator_instance.generate(cls.__pydantic_core_schema__, mode=mode)
172 info_metaclass = ctx.cls.info.declared_metaclass 173 assert info_metaclass, "callback not passed from 'get_metaclass_hook'" 174 if getattr(info_metaclass.type, 'dataclass_transform_spec', None):
331 # Cannot be TupleType as current_info represents a Pydantic model: 332 assert isinstance(filled_with_typevars, Instance) 333 if force_typevars_invariant:
367 """Based on mypy.plugins.dataclasses.DataclassAttribute.serialize."""
368 assert self.type
369 return {
646 lhs = stmt.lvalues[0] 647 assert isinstance(lhs, NameExpr) # collect_field_or_class_var_from_stmt guarantees this 648 if isinstance(maybe_field, PydanticModelField):
830 super_info = t.type.get_containing_type_info('__set__')
831 assert super_info
832 if setter.type:
875 base_settings_node = self._api.lookup_fully_qualified(BASESETTINGS_FULLNAME).node 876 assert isinstance(base_settings_node, TypeInfo) 877 if '__init__' in base_settings_node.names:
878 base_settings_init_node = base_settings_node.names['__init__'].node 879 assert isinstance(base_settings_init_node, FuncDef) 880 if base_settings_init_node is not None and base_settings_init_node.type is not None:
881 func_type = base_settings_init_node.type 882 assert isinstance(func_type, CallableType) 883 for arg_idx, arg_name in enumerate(func_type.arg_names):
1310 for arg in args: 1311 assert arg.type_annotation, 'All arguments must be fully typed.' 1312 arg_types.append(arg.type_annotation)
1325 email = parts.normalized 1326 assert email is not None 1327 name = name or parts.local_part
725 self.core_schema.rebuild() 726 assert not isinstance(self.core_schema, _mock_val_ser.MockCoreSchema), 'this is a bug! please report it' 727 return schema_generator_instance.generate(self.core_schema, mode=mode)
779 adapter.core_schema.rebuild() 780 assert not isinstance(adapter.core_schema, _mock_val_ser.MockCoreSchema), ( 781 'this is a bug! please report it' 782 ) 783 inputs_.append((key, mode, adapter.core_schema))
211 def inner(f): # type: ignore 212 assert f not in RESOLVERS 213 RESOLVERS[typ] = f
257 else:
258 assert min_size == 0
259 pattern = rf'(\W(.{repeats}\W)?)?'
267 if cls.gt is not None: 268 assert min_value is None, 'Set `gt` or `ge`, but not both' 269 min_value = cls.gt
270 if cls.lt is not None: 271 assert max_value is None, 'Set `lt` or `le`, but not both' 272 max_value = cls.lt
288 if cls.gt is not None: 289 assert min_value is None, 'Set `gt` or `ge`, but not both' 290 min_value = cls.gt
292 if cls.lt is not None: 293 assert max_value is None, 'Set `lt` or `le`, but not both' 294 max_value = cls.lt
304 if max_value is not None: 305 assert max_value >= cls.multiple_of, 'Cannot build model with max value smaller than multiple of' 306 max_value = math.floor(max_value / cls.multiple_of)
317 if cls.gt is not None: 318 assert min_value is None, 'Set `gt` or `ge`, but not both' 319 min_value = cls.gt + 1
320 if cls.lt is not None: 321 assert max_value is None, 'Set `lt` or `le`, but not both' 322 max_value = cls.lt - 1
338 if cls.ge is not None: 339 assert cls.gt is None, 'Set `gt` or `ge`, but not both' 340 min_value = cls.ge
345 if cls.le is not None: 346 assert cls.lt is None, 'Set `lt` or `le`, but not both' 347 max_value = cls.le
376 else:
377 assert min_size == 0
378 strategy = st.from_regex(rf'(\W(.{repeats}\W)?)?')
88 ) 89 assert each_item is False, '"each_item" and "whole" conflict, remove "whole"' 90 each_item = not whole
104 else: 105 assert p.kind == Parameter.VAR_KEYWORD, p.kind 106 self.v_kwargs_name = name
762 """ 763 assert self.discriminator_key is not None 764
767
768 assert self.sub_fields is not None
769 sub_fields_mapping: Dict[str, 'ModelField'] = {}
852 ) -> 'ValidateReturn': 853 assert self.type_.__class__ is not DeferredType 854
855 if self.type_.__class__ is ForwardRef: 856 assert cls is not None 857 raise ConfigError(
1102 ) -> 'ValidateReturn': 1103 assert self.discriminator_key is not None 1104 assert self.discriminator_alias is not None
1103 assert self.discriminator_key is not None 1104 assert self.discriminator_alias is not None 1105
1123 if self.sub_fields_mapping is None: 1124 assert cls is not None 1125 raise ConfigError(
1134 # TypeError: `discriminator_value` is unhashable. 1135 assert self.sub_fields_mapping is not None 1136 return v, ErrorWrapper(
1178 elif self.shape == SHAPE_GENERIC:
1179 assert self.sub_fields
1180 t = '{}[{}]'.format(
292 origin_type = getattr(typing, type_._name) 293 assert origin_type is not None 294 # PEP-604 syntax (Ex.: list | str) is represented with a types.UnionType object that does not have __getitem__.
388 if key not in fields: 389 assert field.type_.__class__ is not DeferredType 390 # https://github.com/nedbat/coveragepy/issues/198
392 393 assert field.type_.__class__ is DeferredType, field.type_.__class__ 394
166 info_metaclass = ctx.cls.info.declared_metaclass 167 assert info_metaclass, "callback not passed from 'get_metaclass_hook'" 168 if getattr(info_metaclass.type, 'dataclass_transform_spec', None):
181 182 assert ctx.callee_arg_names[0] == 'default', '"default" is no longer first argument in Field()' 183 assert ctx.callee_arg_names[1] == 'default_factory', '"default_factory" is no longer second argument in Field()'
182 assert ctx.callee_arg_names[0] == 'default', '"default" is no longer first argument in Field()' 183 assert ctx.callee_arg_names[1] == 'default_factory', '"default_factory" is no longer second argument in Field()' 184 default_args = ctx.args[0]
859 for arg in args: 860 assert arg.type_annotation, 'All arguments must be fully typed.' 861 arg_types.append(arg.type_annotation)
276 # the regex should always match, if it doesn't please report with details of the URL tried 277 assert m, 'URL regex failed unexpectedly' 278
364 d = int_domain_regex().fullmatch(host)
365 assert d is not None
366 tld = d.group('tld')
737 email = parts.normalized 738 assert email is not None 739 name = name or parts.local_part
1 import json 2 import pickle 3 from enum import Enum
41 bb = b if isinstance(b, bytes) else b.encode() 42 return pickle.loads(bb) 43 else:
525 else:
526 assert field.shape in {SHAPE_SINGLETON, SHAPE_GENERIC}, field.shape
527 f_schema, f_definitions, f_nested_models = field_singleton_schema(
714 if field_has_discriminator: 715 assert field.sub_fields_mapping is not None 716
194 try:
195 eval('__IPYTHON__')
196 except NameError:
664 """ 665 assert p.exists(), 'path does not exist' 666 for method, name in path_types.items():
35 try: 36 assert not isinstance(partial.func, partial), 'Partial of partial' 37 _check_function_type(function.func)
80 match = regex.match(string, pos) 81 assert match is not None 82 return match
346 # A dot should only appear in the name when it is a submodule import
347 assert '.' in name and (not source or isinstance(source, ast.Import))
348 package_name = name.split('.')[0]
802 def futuresAllowed(self, value): 803 assert value is False 804 if isinstance(self.scope, ModuleScope):
815 def annotationsFutureEnabled(self, value): 816 assert value is True 817 assert isinstance(self.scope, ModuleScope)
816 assert value is True 817 assert isinstance(self.scope, ModuleScope) 818 self.scope._annotations_future_enabled = True
9 import shutil 10 import subprocess 11 import tempfile
109 def makeEmptyFile(self, *parts): 110 assert parts 111 fpath = os.path.join(self.tempdir, *parts)
397 def evaluate(source): 398 exec(source) 399 try:
696 if stdin:
697 p = subprocess.Popen(command, env=env, stdin=subprocess.PIPE,
698 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
699 (stdout, stderr) = p.communicate(stdin.encode('ascii'))
700 else: 701 p = subprocess.Popen(command, env=env, 702 stdout=subprocess.PIPE, stderr=subprocess.PIPE) 703 (stdout, stderr) = p.communicate()
17 binding = Importation('a', None, 'a')
18 assert binding.source_statement == 'import a'
19 assert str(binding) == 'a'
18 assert binding.source_statement == 'import a' 19 assert str(binding) == 'a' 20
22 binding = Importation('c', None, 'a')
23 assert binding.source_statement == 'import a as c'
24 assert str(binding) == 'a as c'
23 assert binding.source_statement == 'import a as c' 24 assert str(binding) == 'a as c' 25
27 binding = SubmoduleImportation('a.b', None)
28 assert binding.source_statement == 'import a.b'
29 assert str(binding) == 'a.b'
28 assert binding.source_statement == 'import a.b' 29 assert str(binding) == 'a.b' 30
33 binding = Importation('c', None, 'a.b')
34 assert binding.source_statement == 'import a.b as c'
35 assert str(binding) == 'a.b as c'
34 assert binding.source_statement == 'import a.b as c' 35 assert str(binding) == 'a.b as c' 36
38 binding = Importation('a', None, 'a.b')
39 assert binding.source_statement == 'import a.b as a'
40 assert str(binding) == 'a.b as a'
39 assert binding.source_statement == 'import a.b as a' 40 assert str(binding) == 'a.b as a' 41
43 binding = ImportationFrom('a', None, '.', 'a')
44 assert binding.source_statement == 'from . import a'
45 assert str(binding) == '.a'
44 assert binding.source_statement == 'from . import a' 45 assert str(binding) == '.a' 46
48 binding = ImportationFrom('a', None, '..', 'a')
49 assert binding.source_statement == 'from .. import a'
50 assert str(binding) == '..a'
49 assert binding.source_statement == 'from .. import a' 50 assert str(binding) == '..a' 51
53 binding = ImportationFrom('b', None, '..a', 'b')
54 assert binding.source_statement == 'from ..a import b'
55 assert str(binding) == '..a.b'
54 assert binding.source_statement == 'from ..a import b' 55 assert str(binding) == '..a.b' 56
58 binding = ImportationFrom('c', None, '..a', 'b')
59 assert binding.source_statement == 'from ..a import b as c'
60 assert str(binding) == '..a.b as c'
59 assert binding.source_statement == 'from ..a import b as c' 60 assert str(binding) == '..a.b as c' 61
63 binding = ImportationFrom('b', None, 'a', 'b')
64 assert binding.source_statement == 'from a import b'
65 assert str(binding) == 'a.b'
64 assert binding.source_statement == 'from a import b' 65 assert str(binding) == 'a.b' 66
68 binding = ImportationFrom('c', None, 'a.b', 'c')
69 assert binding.source_statement == 'from a.b import c'
70 assert str(binding) == 'a.b.c'
69 assert binding.source_statement == 'from a.b import c' 70 assert str(binding) == 'a.b.c' 71
73 binding = ImportationFrom('c', None, 'a', 'b')
74 assert binding.source_statement == 'from a import b as c'
75 assert str(binding) == 'a.b as c'
74 assert binding.source_statement == 'from a import b as c' 75 assert str(binding) == 'a.b as c' 76
78 binding = ImportationFrom('d', None, 'a.b', 'c')
79 assert binding.source_statement == 'from a.b import c as d'
80 assert str(binding) == 'a.b.c as d'
79 assert binding.source_statement == 'from a.b import c as d' 80 assert str(binding) == 'a.b.c as d' 81
83 binding = StarImportation('a.b', None)
84 assert binding.source_statement == 'from a.b import *'
85 assert str(binding) == 'a.b.*'
84 assert binding.source_statement == 'from a.b import *' 85 assert str(binding) == 'a.b.*' 86
88 binding = StarImportation('.b', None)
89 assert binding.source_statement == 'from .b import *'
90 assert str(binding) == '.b.*'
89 assert binding.source_statement == 'from .b import *' 90 assert str(binding) == '.b.*' 91
93 binding = FutureImportation('print_function', None, None)
94 assert binding.source_statement == 'from __future__ import print_function'
95 assert str(binding) == '__future__.print_function'
94 assert binding.source_statement == 'from __future__ import print_function' 95 assert str(binding) == '__future__.print_function' 96
124 error = checker.messages[0]
125 assert error.message == '%r imported but unused'
126 assert error.message_args == ('.fu', )
125 assert error.message == '%r imported but unused'
126 assert error.message_args == ('.fu', )
127
130 error = checker.messages[0]
131 assert error.message == '%r imported but unused'
132 assert error.message_args == ('.fu as baz', )
131 assert error.message == '%r imported but unused'
132 assert error.message_args == ('.fu as baz', )
133
742 error = checker.messages[0]
743 assert error.message.startswith("'from %s import *' used; unable ")
744 assert error.message_args == ('fu', )
743 assert error.message.startswith("'from %s import *' used; unable ")
744 assert error.message_args == ('fu', )
745
746 error = checker.messages[1]
747 assert error.message == '%r imported but unused'
748 assert error.message_args == ('fu.*', )
747 assert error.message == '%r imported but unused'
748 assert error.message_args == ('fu.*', )
749
763 error = checker.messages[0]
764 assert error.message.startswith("'from %s import *' used; unable ")
765 assert error.message_args == ('.fu', )
764 assert error.message.startswith("'from %s import *' used; unable ")
765 assert error.message_args == ('.fu', )
766
767 error = checker.messages[1]
768 assert error.message == '%r imported but unused'
769 assert error.message_args == ('.fu.*', )
768 assert error.message == '%r imported but unused'
769 assert error.message_args == ('.fu.*', )
770
774 error = checker.messages[0]
775 assert error.message.startswith("'from %s import *' used; unable ")
776 assert error.message_args == ('..', )
775 assert error.message.startswith("'from %s import *' used; unable ")
776 assert error.message_args == ('..', )
777
778 error = checker.messages[1]
779 assert error.message == '%r imported but unused'
780 assert error.message_args == ('from .. import *', )
779 assert error.message == '%r imported but unused'
780 assert error.message_args == ('from .. import *', )
781
797 error = checker.messages[0]
798 assert error.message == "'from %s import *' only allowed at module level"
799 assert error.message_args == ('..', )
798 assert error.message == "'from %s import *' only allowed at module level"
799 assert error.message_args == ('..', )
800
887 error = checker.messages[0]
888 assert error.message == '%r imported but unused'
889 assert error.message_args == ('fu.bar', )
888 assert error.message == '%r imported but unused'
889 assert error.message_args == ('fu.bar', )
890 assert error.lineno == 5 if self.withDoctest else 3
889 assert error.message_args == ('fu.bar', )
890 assert error.lineno == 5 if self.withDoctest else 3
891
521 width = shutil.get_terminal_size().columns - 2 522 except Exception: 523 pass 524 argparse.HelpFormatter.__init__(self, prog, indent_increment,
102 with open(filename, 'rb') as f: 103 exec(f.read(), custom_namespace) 104 # Retrieve the class `formattername` from that namespace
16 17 import subprocess 18
92 def _get_nix_font_path(self, name, style):
93 proc = subprocess.Popen(['fc-list', f"{name}:style={style}", 'file'],
94 stdout=subprocess.PIPE, stderr=None)
95 stdout, _ = proc.communicate()
92 def _get_nix_font_path(self, name, style):
93 proc = subprocess.Popen(['fc-list', f"{name}:style={style}", 'file'],
94 stdout=subprocess.PIPE, stderr=None)
95 stdout, _ = proc.communicate()
511 """Preprocess the token component of a token definition."""
512 assert type(token) is _TokenType or callable(token), \
513 f'token type must be simple type or callable, not {token!r}'
514 return token
528 else:
529 assert False, f'unknown new state {new_state!r}'
530 elif isinstance(new_state, combined):
535 for istate in new_state:
536 assert istate != new_state, f'circular state ref {istate!r}'
537 itokens.extend(cls._process_state(unprocessed,
543 for istate in new_state:
544 assert (istate in unprocessed or
545 istate in ('#pop', '#push')), \
546 'unknown new state ' + istate
547 return new_state
548 else:
549 assert False, f'unknown new state def {new_state!r}'
550
552 """Preprocess a single state definition."""
553 assert isinstance(state, str), f"wrong state name {state!r}"
554 assert state[0] != '#', f"invalid state name {state!r}"
553 assert isinstance(state, str), f"wrong state name {state!r}"
554 assert state[0] != '#', f"invalid state name {state!r}"
555 if state in processed:
561 # it's a state reference
562 assert tdef != state, f"circular state reference {state!r}"
563 tokens.extend(cls._process_state(unprocessed, processed,
575
576 assert type(tdef) is tuple, f"wrong rule def {tdef!r}"
577
741 else:
742 assert False, f"wrong state def: {new_state!r}"
743 statetokens = tokendefs[statestack[-1]]
828 else:
829 assert False, f"wrong state def: {new_state!r}"
830 statetokens = tokendefs[ctx.stack[-1]]
153 with open(filename, 'rb') as f: 154 exec(f.read(), custom_namespace) 155 # Retrieve the class `lexername` from that namespace
224 def get_newest_version():
225 f = urlopen('http://www.lua.org/manual/')
226 r = re.compile(r'^<A HREF="(\d\.\d)/">(Lua )?\1</A>')
232 def get_lua_functions(version):
233 f = urlopen(f'http://www.lua.org/manual/{version}/')
234 r = re.compile(r'^<A HREF="manual.html#pdf-(?!lua|LUA)([^:]+)">\1</A>')
1247 # Pull content from lex.h.
1248 lex_file = urlopen(LEX_URL).read().decode('utf8', errors='ignore')
1249 keywords = parse_lex_keywords(lex_file)
1253 # Parse content in item_create.cc.
1254 item_create_file = urlopen(ITEM_CREATE_URL).read().decode('utf8', errors='ignore')
1255 functions.update(parse_item_create_functions(item_create_file))
3298 def get_php_references(): 3299 download = urlretrieve(PHP_MANUAL_URL) 3300 with tarfile.open(download[0]) as tar:
3300 with tarfile.open(download[0]) as tar:
3301 tar.extractall()
3302 yield from glob.glob(f"{PHP_MANUAL_DIR}{PHP_REFERENCE_GLOB}")
641 def update_myself():
642 content = urlopen(DATATYPES_URL).read().decode('utf-8', errors='ignore')
643 data_file = list(content.splitlines())
646
647 content = urlopen(KEYWORDS_URL).read().decode('utf-8', errors='ignore')
648 keywords = parse_keywords(content)
3054 if __name__ == '__main__': # pragma: no cover 3055 import subprocess 3056 from pygments.util import format_lines, duplicates_removed
3060 def extract_completion(var_type): 3061 s = subprocess.Popen(['scilab', '-nwni'], stdin=subprocess.PIPE, 3062 stdout=subprocess.PIPE, stderr=subprocess.PIPE) 3063 output = s.communicate(f'''\
3060 def extract_completion(var_type): 3061 s = subprocess.Popen(['scilab', '-nwni'], stdin=subprocess.PIPE, 3062 stdout=subprocess.PIPE, stderr=subprocess.PIPE) 3063 output = s.communicate(f'''\
727 for token in Inform6Lexer.tokens: 728 if token == 'root': 729 continue
16 17 jsonnet_token = r'[^\W\d]\w*' 18 jsonnet_function_token = jsonnet_token + r'(?=\()'
42 else: 43 assert backslash == "disallowed" 44 return words(names, prefix, suffix)
49 # Use within verbose regexes 50 token_end = r''' 51 (?= 52 \s # whitespace 53 | ; # comment 54 | \#[;|!] # fancy comments 55 | [)\]] # end delimiters 56 | $ # end of file 57 ) 58 ''' 59
3040 # ...so, express it like this 3041 _token_end = r'(?=\s|#|[)\]]|$)' 3042
333 _id = r'[A-Za-z]\w*' 334 _TOKEN_REF = r'[A-Z]\w*' 335 _RULE_REF = r'[a-z]\w*'
1476 result += 0.01 1477 assert 0.0 <= result <= 1.0 1478 return result
1560 result = 1.0 1561 assert 0.0 <= result <= 1.0 1562 return result
235 else: 236 assert 0, "SQL keywords not found" 237
78 return text
79 assert False, f"wrong color format {text!r}"
80
386 """ 387 assert isinstance(test, nodes.Name) 388 emit = False
706 # or as a type annotation. We only want to check the former. 707 assert node is not None 708 return not isinstance(node.parent, nodes.AnnAssign)
158 basename = get_import_name(node, node.modname) 159 assert basename is not None, "Module name should not be None" 160 self.check_deprecated_module(node, basename)
344 return 345 if keyword_token == "in": 346 # This special case was added in https://github.com/pylint-dev/pylint/pull/4948
513 line = node.fromlineno 514 assert line, node 515 if prev_line == line and self._visited_lines.get(line) != 2:
523 tolineno = node.tolineno 524 assert tolineno, node 525 lines: list[str] = []
168 for prefix in mod.split("."):
169 assert isinstance(node[0], dict)
170 node = node[0].setdefault(prefix, ({}, [])) # type: ignore[arg-type,assignment]
170 node = node[0].setdefault(prefix, ({}, [])) # type: ignore[arg-type,assignment]
171 assert isinstance(node[1], list)
172 node[1].extend(files)
695 trailing_comma_tuple_enabled_once = True 696 if token_string == "elif": 697 # AST exists by the time process_tokens is called, so
573 # If this blows, something is clearly wrong 574 assert enum_def is not None, "enum_def unexpectedly None" 575 else:
1556 else: 1557 assert isinstance(arg, nodes.AssignName) 1558 name = arg.name
1427 """Leave module: check globals.""" 1428 assert len(self._to_consume) == 1 1429
1682 # name node from an astroid built from live code, skip
1683 assert not stmt.root().file.endswith(".py")
1684 return
19 # Make sure subparsers are initialized properly 20 assert linter._arg_parser._subparsers 21 subparser_action = linter._arg_parser._subparsers._group_actions[0]
21 subparser_action = linter._arg_parser._subparsers._group_actions[0] 22 assert isinstance(subparser_action, argparse._SubParsersAction) 23
24 for name, subparser in subparser_action.choices.items(): 25 assert isinstance(subparser, argparse.ArgumentParser) 26 if name == command:
153 # argparse initializes all options from the checker 154 assert argument.kwargs["old_names"] 155 for old_name in argument.kwargs["old_names"]:
248 group_name = group.title 249 assert group_name 250 if group_name in skipsections:
344 help_msg = optdict.get("help", "")
345 assert isinstance(help_msg, str)
346 help_text = textwrap.wrap(help_msg, width=79)
361 if "kwargs" in optdict: 362 assert isinstance(optdict["kwargs"], dict) 363 if "new_names" in optdict["kwargs"]:
385 386 assert group.title 387 if group_table:
137 ) -> None: 138 assert isinstance(values, (list, tuple)) 139 values_to_print: list[str] = []
140 for msg in values: 141 assert isinstance(msg, str) 142 values_to_print += utils._check_csv(msg)
357 ) -> None: 358 assert isinstance(values, (tuple, list)) 359 for msgid in utils._check_csv(values[0]):
420 ) -> None: 421 assert isinstance(values, (tuple, list)) 422 assert isinstance(
421 assert isinstance(values, (tuple, list)) 422 assert isinstance( 423 values[0], str 424 ), "'output-format' should be a comma separated string of reporters" 425 self.linter._load_reporters(values[0])
57 if "init-hook" in config_data: 58 exec(utils._unquote(config_data["init-hook"])) # pylint: disable=exec-used 59
33 ) -> None: 34 assert old_names 35 self.old_names = old_names
55 ) -> None: 56 assert isinstance(values, list) 57 setattr(namespace, self.dest, values[0])
78 ) -> None: 79 assert new_names 80 self.new_names = new_names
100 ) -> None: 101 assert isinstance(values, list) 102 setattr(namespace, self.dest, values[0])
17 """Copied from argparse.ArgumentDefaultsHelpFormatter.""" 18 assert action.help 19 help_string = action.help
150 """ 151 assert value is not None 152 exec(value) # pylint: disable=exec-used
151 assert value is not None 152 exec(value) # pylint: disable=exec-used 153
156 """Set the rcfile.""" 157 assert value is not None 158 run._rcfile = value
162 """Set the output.""" 163 assert value is not None 164 run._output = value
168 """Add plugins to the list of loadable plugins.""" 169 assert value is not None 170 run._plugins.extend(utils._splitstrip(value))
173 def _set_verbose_mode(run: Run, value: str | None) -> None: 174 assert value is None 175 run.verbose = True
179 """Enable all extensions.""" 180 assert value is None 181 for filename in Path(extensions.__file__).parent.iterdir():
689 result = -1 < node_line_count < min_lines 690 assert isinstance( 691 result, bool 692 ), "Result of int comparison should have been a boolean" 693 return result
13 import shutil 14 import subprocess 15 import tempfile
54 if charset:
55 assert charset.lower() in {
56 "utf-8",
57 "iso-8859-1",
58 "latin1",
59 }, f"unsupported charset {charset}"
60 self.emit(f'charset="{charset}"')
114 if mapfile: 115 subprocess.run( 116 [ 117 self.renderer, 118 "-Tcmapx", 119 "-o", 120 mapfile, 121 "-T", 122 target, 123 dot_sourcepath, 124 "-o", 125 outputfile, 126 ], 127 check=True, 128 ) 129 else:
129 else: 130 subprocess.run( 131 [self.renderer, "-T", target, dot_sourcepath, "-o", outputfile], 132 check=True, 133 ) 134 os.unlink(dot_sourcepath)
6 7 import pickle 8 import sys
41 with open(data_file, "rb") as stream: 42 data = pickle.load(stream) 43 if not isinstance(data, LinterStats):
70 if scope in {"module", "line"}:
71 assert isinstance(line, int) # should always be int inside module scope
72
150 """Do some tests and then iterate over message definitions to set state."""
151 assert scope in {"package", "module", "line"}
152
11 12 import dill 13
46 global _worker_linter # pylint: disable=global-statement 47 _worker_linter = dill.loads(linter) 48 assert _worker_linter
47 _worker_linter = dill.loads(linter) 48 assert _worker_linter 49
85 msgs = _worker_linter.reporter.messages 86 assert isinstance(_worker_linter.reporter, reporters.CollectingReporter) 87 _worker_linter.reporter.reset()
84 # See https://github.com/python/typeshed/pull/5623 for rationale behind assertion 85 assert isinstance(sys.stdin, TextIOWrapper) 86 sys.stdin = TextIOWrapper(sys.stdin.detach(), encoding="utf-8")
95 klass = getattr(module, class_name)
96 assert issubclass(klass, BaseReporter), f"{klass} is not a BaseReporter"
97 return klass # type: ignore[no-any-return]
1169 }
1170 note = eval(evaluation, {}, stats_dict) # pylint: disable=eval-used
1171 except Exception as ex: # pylint: disable=broad-except
174 """Create a diagram object.""" 175 assert node not in self._nodes 176 ent = ClassEntity(title, node)
308 """Create a diagram object.""" 309 assert node not in self._nodes 310 ent = PackageEntity(title, node)
9 import os 10 import subprocess 11 import tempfile
78 if self.charset:
79 assert (
80 self.charset.lower() in ALLOWED_CHARSETS
81 ), f"unsupported charset {self.charset}"
82 self.emit(f'charset="{self.charset}"')
182 if target not in graphviz_extensions: 183 subprocess.run( 184 ["dot", "-T", target, dot_sourcepath, "-o", outputfile], check=True 185 ) 186 os.unlink(dot_sourcepath)
182 if target not in graphviz_extensions: 183 subprocess.run( 184 ["dot", "-T", target, dot_sourcepath, "-o", outputfile], check=True 185 ) 186 os.unlink(dot_sourcepath)
11 import shutil 12 import subprocess 13 import sys
249 """ 250 dot_output = subprocess.run( 251 ["dot", "-T?"], capture_output=True, check=False, encoding="utf-8" 252 ) 253 match = re.match(
249 """ 250 dot_output = subprocess.run( 251 ["dot", "-T?"], capture_output=True, check=False, encoding="utf-8" 252 ) 253 match = re.match(
183 try:
184 note: int = eval( # pylint: disable=eval-used
185 evaluation, {}, {**counts_dict, "statement": stats.statement or 1}
186 )
187 except Exception as ex: # pylint: disable=broad-except
64 """Add a node to children.""" 65 assert child not in self.parents() 66 self.children.append(child)
75 """Return the ancestor nodes.""" 76 assert self.parent is not self 77 if self.parent is None:
189 super().__init__(children=children) 190 assert isinstance(cols, int) 191 self.cols = cols
143 ) 144 assert not self.packages[package].url.endswith( 145 ".git" 146 ), "You don't need the .git at the end of the github url." 147 comment += (
53 plural = "s" if len(fatal_msgs) > 1 else ""
54 assert (
55 not fatal_msgs
56 ), f"We encountered {len(fatal_msgs)} fatal error message{plural} (see log)."
57
63 64 assert len(messages) == len(got), msg 65
66 for expected_msg, gotten_msg in zip(messages, got): 67 assert expected_msg.msg_id == gotten_msg.msg_id, msg 68 assert expected_msg.node == gotten_msg.node, msg
67 assert expected_msg.msg_id == gotten_msg.msg_id, msg 68 assert expected_msg.node == gotten_msg.node, msg 69 assert expected_msg.args == gotten_msg.args, msg
68 assert expected_msg.node == gotten_msg.node, msg 69 assert expected_msg.args == gotten_msg.args, msg 70 assert expected_msg.confidence == gotten_msg.confidence, msg
69 assert expected_msg.args == gotten_msg.args, msg 70 assert expected_msg.confidence == gotten_msg.confidence, msg 71
75 76 assert expected_msg.line == gotten_msg.line, msg 77 assert expected_msg.col_offset == gotten_msg.col_offset, msg
76 assert expected_msg.line == gotten_msg.line, msg 77 assert expected_msg.col_offset == gotten_msg.col_offset, msg 78 assert expected_msg.end_line == gotten_msg.end_line, msg
77 assert expected_msg.col_offset == gotten_msg.col_offset, msg 78 assert expected_msg.end_line == gotten_msg.end_line, msg 79 assert expected_msg.end_col_offset == gotten_msg.end_col_offset, msg
78 assert expected_msg.end_line == gotten_msg.end_line, msg 79 assert expected_msg.end_col_offset == gotten_msg.end_col_offset, msg 80
89
90 assert (
91 name in POSSIBLE_TEST_OPTIONS
92 ), f"[testoptions]' can only contains one of {POSSIBLE_TEST_OPTIONS} and had '{name}'"
93 self.options[name] = conv(value) # type: ignore[literal-required]
238 for msg in messages:
239 assert (
240 msg.symbol != "fatal"
241 ), f"Pylint analysis failed because of '{msg.msg}'"
242 received_msgs[msg.line, msg.symbol] += 1
253 actual_messages, actual_output = self._get_actual() 254 assert ( 255 expected_messages == actual_messages 256 ), self.error_msg_for_unequal_messages( 257 actual_messages, expected_messages, actual_output 258 ) 259 self._check_output_text(expected_messages, expected_output, actual_output)
339 """ 340 assert expected_output == actual_output, self.error_msg_for_unequal_output( 341 expected_output, actual_output 342 )
92 # symbol, line, column, end_line, end_column, node, msg, confidences 93 assert len(row) == 8 94 return cls(
54 result += get_rst_title(title, "~")
55 assert isinstance(options, list)
56 result += f"{get_rst_section(None, options)}\n"
191 """Set status (enabled/disable) for a given message at a given line.""" 192 assert line > 0 193 if scope != "line":
216 if state_scope == MSG_STATE_SCOPE_MODULE: 217 assert isinstance(line, int) # should always be int inside module scope 218
92 assignment_required = False 93 previous_token = "" 94
129 if help_opt: 130 assert isinstance(help_opt, str) 131 formatted_help = normalize_text(help_opt, indent=" ")
313 if "kwargs" in optdict: 314 assert isinstance(optdict["kwargs"], dict) 315 if "new_names" in optdict["kwargs"]:
319 if help_opt: 320 assert isinstance(help_opt, str) 321 help_opt = normalize_text(help_opt, indent="# ")
363 os.getpid(), 364 random.randint(0, 999999), # noqa: S311 365 output['cov_worker_node_id'],
364 self.cov_total = 0 365 assert self.cov_total is not None, 'Test coverage should never be `None`' 366 cov_fail_under = self.options.cov_fail_under
143 def leave_fstring(self) -> None: 144 assert self.state == FStringState.at_fstring_end 145 self.state = self.stack.pop()
159 def consume_rbrace(self) -> None: 160 assert ( 161 self.state == FStringState.in_fstring_expr 162 or self.state == FStringState.in_fstring_expr_modifier 163 ) 164
173 def consume_colon(self) -> None: 174 assert self.state == FStringState.in_fstring_expr 175 self.state = FStringState.in_fstring_expr_modifier
214 def peek(self) -> str: 215 assert self.is_in_bounds() 216 return self.source[self.current_index]
218 def peek_next(self) -> str: 219 assert self.current_index + 1 < len(self.source) 220 return self.source[self.current_index + 1]
282 token_str = self.source[self.prev_index : self.current_index] 283 if token_str == "\r ": 284 self.all_whitespace_on_this_line = False
312 if self.fstring_prefix is not None: 313 assert self.fstring_quote is not None 314 self.fstring_prefix_quote_stack.append(
322 if self.fstring_prefix is None: 323 assert self.fstring_quote is None 324 raise Underflow
560 if self.fstring_state.state == FStringState.at_fstring_middle: 561 assert self.fstring_quote is not None 562 is_single_quote = len(self.fstring_quote) == 1
575 # but DON'T escape `\N{` in raw f-strings.
576 assert self.fstring_prefix is not None
577 if (
606 607 assert self.fstring_quote is not None 608 if self.match(self.fstring_quote):
627 if self.fstring_state.state == FStringState.at_fstring_end: 628 assert self.fstring_quote is not None 629 for _ in range(len(self.fstring_quote)):
639 char = self.source[self.current_index]
640 assert self.fstring_quote is not None
641 if (char == "\n" or char == "{") and len(self.fstring_quote) == 1:
2 import copy 3 import pickle 4 import regex
3774 p = pickle.dumps(r)
3775 r = pickle.loads(p)
3776 self.assertEqual(r.match('foo').span(), (0, 3))
59 urllib3_version = urllib3_version.split(".")
60 assert urllib3_version != ["dev"] # Verify urllib3 isn't installed from git.
61
69 # urllib3 >= 1.21.1 70 assert major >= 1 71 if major == 1:
71 if major == 1: 72 assert minor >= 21 73
78 # chardet_version >= 3.0.2, < 6.0.0 79 assert (3, 0, 2) <= (major, minor, patch) < (6, 0, 0) 80 elif charset_normalizer_version:
83 # charset_normalizer >= 2.0.0 < 4.0.0 84 assert (2, 0, 0) <= (major, minor, patch) < (4, 0, 0) 85 else:
44 """ 45 assert isinstance(u_string, str) 46 try:
147 x = x.encode("utf-8")
148 return hashlib.md5(x).hexdigest()
149
155 x = x.encode("utf-8")
156 return hashlib.sha1(x).hexdigest()
157
204 205 cnonce = hashlib.sha1(s).hexdigest()[:16] 206 if _algorithm == "MD5-SESS":
12 """ 13 assert values, "1 or more values required" 14 for value in values:
122 total_ratio = sum(ratios) 123 assert total_ratio > 0, "Sum of ratios must be > 0" 124
434 435 assert fore is not None 436 assert back is not None
435 assert fore is not None 436 assert back is not None 437
564 """ 565 assert len(title) < 255, "Console title must be less than 255 characters" 566 SetConsoleTitle(title)
364 if self.type == ColorType.TRUECOLOR: 365 assert self.triplet is not None 366 return self.triplet
367 elif self.type == ColorType.EIGHT_BIT: 368 assert self.number is not None 369 return EIGHT_BIT_PALETTE[self.number]
370 elif self.type == ColorType.STANDARD: 371 assert self.number is not None 372 return theme.ansi_colors[self.number]
373 elif self.type == ColorType.WINDOWS: 374 assert self.number is not None 375 return WINDOWS_PALETTE[self.number]
376 else: # self.type == ColorType.DEFAULT: 377 assert self.number is None 378 return theme.foreground_color if foreground else theme.background_color
492 number = self.number 493 assert number is not None 494 fore, back = (30, 40) if number < 8 else (82, 92)
498 number = self.number 499 assert number is not None 500 fore, back = (30, 40) if number < 8 else (82, 92)
503 elif _type == ColorType.EIGHT_BIT:
504 assert self.number is not None
505 return ("38" if foreground else "48", "5", str(self.number))
507 else: # self.standard == ColorStandard.TRUECOLOR: 508 assert self.triplet is not None 509 red, green, blue = self.triplet
519 if system == ColorSystem.EIGHT_BIT and self.system == ColorSystem.TRUECOLOR: 520 assert self.triplet is not None 521 _h, l, s = rgb_to_hls(*self.triplet.normalized)
545 if self.system == ColorSystem.TRUECOLOR: 546 assert self.triplet is not None 547 triplet = self.triplet
548 else: # self.system == ColorSystem.EIGHT_BIT 549 assert self.number is not None 550 triplet = ColorTriplet(*EIGHT_BIT_PALETTE[self.number])
556 if self.system == ColorSystem.TRUECOLOR: 557 assert self.triplet is not None 558 triplet = self.triplet
559 else: # self.system == ColorSystem.EIGHT_BIT 560 assert self.number is not None 561 if self.number < 16:
572 """Parse six hex characters in to RGB triplet.""" 573 assert len(hex_color) == 6, "must be 6 characters" 574 color = ColorTriplet(
1148 1149 assert count >= 0, "count must be >= 0" 1150 self.print(NewLine(count))
1924 offset -= 1 1925 assert frame is not None 1926 return frame.f_code.co_filename, frame.f_lineno, frame.f_locals
2184 """ 2185 assert ( 2186 self.record 2187 ), "To export console contents set record=True in the constructor or instance" 2188
2240 """ 2241 assert ( 2242 self.record 2243 ), "To export console contents set record=True in the constructor or instance" 2244 fragments: List[str] = []
70 ) -> None: 71 assert refresh_per_second > 0, "refresh_per_second must be > 0" 72 self._renderable = renderable
376 time.sleep(0.4) 377 if random.randint(0, 10) < 1: 378 console.log(next(examples))
379 exchange_rate_dict[(select_exchange, exchange)] = 200 / ( 380 (random.random() * 320) + 1 381 )
141 exc_type, exc_value, exc_traceback = record.exc_info 142 assert exc_type is not None 143 assert exc_value is not None
142 assert exc_type is not None 143 assert exc_value is not None 144 traceback = Traceback.from_exception(
264 def on_child_close(self, context: MarkdownContext, child: MarkdownElement) -> bool: 265 assert isinstance(child, TableRowElement) 266 self.row = child
276 def on_child_close(self, context: MarkdownContext, child: MarkdownElement) -> bool: 277 assert isinstance(child, TableRowElement) 278 self.rows.append(child)
288 def on_child_close(self, context: MarkdownContext, child: MarkdownElement) -> bool: 289 assert isinstance(child, TableDataElement) 290 self.cells.append(child)
311 312 assert justify in get_args(JustifyMethod) 313 return cls(justify=justify)
337 def on_child_close(self, context: MarkdownContext, child: MarkdownElement) -> bool: 338 assert isinstance(child, ListItem) 339 self.items.append(child)
608 element = context.stack.pop()
609 assert isinstance(element, Link)
610 link_style = console.get_style("markdown.link", default="none")
197 console = console or get_console() 198 assert console is not None 199
202 if value is not None: 203 assert console is not None 204 builtins._ = None # type: ignore[attr-defined]
515 ) 516 assert self.node is not None 517 return self.node.check_length(start_length, max_length)
521 node = self.node 522 assert node is not None 523 whitespace = self.whitespace
523 whitespace = self.whitespace 524 assert node.children 525 if node.key_repr:
660 rich_repr_result = obj.__rich_repr__() 661 except Exception: 662 pass 663
1090 ) -> None: 1091 assert refresh_per_second > 0, "refresh_per_second must be > 0" 1092 self._lock = RLock()
1713 time.sleep(0.01) 1714 if random.randint(0, 100) < 1: 1715 progress.log(next(examples))
221 """ 222 assert self.choices is not None 223 if self.case_sensitive:
170 text, style, control = self 171 assert cut >= 0 172
3 from operator import attrgetter 4 from pickle import dumps, loads 5 from random import randint
197 self._link_id = (
198 f"{randint(0, 999999)}{hash(self._meta)}" if (link or meta) else ""
199 )
247 style._meta = dumps(meta)
248 style._link_id = f"{randint(0, 999999)}{hash(style._meta)}"
249 style._hash = None
470 """Get meta information (can not be changed after construction)."""
471 return {} if self._meta is None else cast(Dict[str, Any], loads(self._meta))
472
485 style._link = self._link
486 style._link_id = f"{randint(0, 999999)}" if self._link else ""
487 style._null = False
637 style._link = self._link
638 style._link_id = f"{randint(0, 999999)}" if self._link else ""
639 style._hash = self._hash
683 style._link = link
684 style._link_id = f"{randint(0, 999999)}" if link else ""
685 style._hash = None
506 """Split tokens to one per line.""" 507 assert lexer # required to make MyPy happy - we know lexer is not None at this point 508
907 """ 908 assert len(character) == 1, "Character must be a string of length 1" 909 if count:
924 """ 925 assert len(character) == 1, "Character must be a string of length 1" 926 if count:
940 """ 941 assert len(character) == 1, "Character must be a string of length 1" 942 if count:
1079 """ 1080 assert separator, "separator must not be empty" 1081
328 if not isinstance(suppress_entity, str):
329 assert (
330 suppress_entity.__file__ is not None
331 ), f"{suppress_entity!r} must be a module with '__file__' attribute"
332 path = os.path.dirname(suppress_entity.__file__)
772 if excluded: 773 assert exclude_frames is not None 774 yield Text(
173 x.append(None) 174 assert x[pos] is None 175 x[pos] = value
291 if comment[1] is not None: 292 assert isinstance(comment[1], list) 293 r[1] = comment[1]
301 if comment[1] is not None: 302 assert isinstance(comment[1], list) 303 r[3] = comment[1]
835 if not list_ok: 836 assert isinstance(d, dict) 837 if level >= len(key_list):
994 def add_yaml_merge(self, value: Any) -> None: 995 assert not hasattr(self, merge_attrib) 996 setattr(self, merge_attrib, value)
150 nprint = Nprint()
151 nprintf = Nprint('/var/tmp/ruamel.yaml.log')
152
197 return self.__setsingleitem__(index, value) 198 assert iter(value) 199 # nprint(index.start, index.stop, index.step, index.indices(len(self)))
138 tag = self.resolver.resolve(ScalarNode, event.value, event.implicit) 139 assert not isinstance(tag, str) 140 # e.g tag.yaml.org,2002:str
158 tag = self.resolver.resolve(SequenceNode, None, start_event.implicit) 159 assert not isinstance(tag, str) 160 node = SequenceNode(
192 tag = self.resolver.resolve(MappingNode, None, start_event.implicit) 193 assert not isinstance(tag, str) 194 node = MappingNode(
226 node.comment = [None, None] 227 assert not isinstance(node, ScalarEvent) 228 # this is a post comment on a mapping node, add as third element
575 key = self.construct_object(key_node) 576 assert key not in omap 577 value = self.construct_object(value_node)
930 def comment(self, idx: Any) -> Any: 931 assert self.loader.comment_handling is not None 932 x = self.scanner.comments[idx]
1568 key = self.construct_object(key_node) 1569 assert key not in omap 1570 value = self.construct_object(value_node)
1645 return 1646 except: # NOQA 1647 pass 1648 raise ConstructorError(
78 if major is None: 79 assert minor is None 80 return None
81 if isinstance(major, str):
82 assert minor is None
83 parts = major.split('.')
83 parts = major.split('.')
84 assert len(parts) == 2
85 return Version(int(parts[0]), int(parts[1]))
86 elif isinstance(major, tuple): 87 assert minor is None 88 assert len(major) == 2
87 assert minor is None 88 assert len(major) == 2 89 major, minor = major
89 major, minor = major 90 assert minor is not None 91 return Version(major, minor)
511 popped = self.flow_context.pop() 512 assert popped == '[' 513 self.write_indicator(self.flow_seq_end, False)
529 popped = self.flow_context.pop() 530 assert popped == '[' 531 if self.canonical:
580 popped = self.flow_context.pop()
581 assert popped == '{' # empty flow mapping
582 self.write_indicator(self.flow_map_end, False)
605 popped = self.flow_context.pop()
606 assert popped in ['{', '']
607 if self.canonical:
847 raise TypeError(f'unknown version type {type(val)}')
848 assert len(sval) == 2, f'version can only have major.minor, got {val}'
849 assert sval[0] == 1, f'version major part can only be 1, got {val}'
848 assert len(sval) == 2, f'version can only have major.minor, got {val}'
849 assert sval[0] == 1, f'version major part can only be 1, got {val}'
850 assert sval[1] in [1, 2], f'version minor part can only be 2 or 1, got {val}'
849 assert sval[0] == 1, f'version major part can only be 1, got {val}'
850 assert sval[1] in [1, 2], f'version minor part can only be 2 or 1, got {val}'
851 self._version = sval
237 event = StreamEndEvent(token.start_mark, token.end_mark, comment=token.comment) 238 assert not self.states 239 assert not self.marks
238 assert not self.states 239 assert not self.marks 240 self.state = None
836 # nprintf('>>>dcxxx', comment, line)
837 assert comment[0][0] == line + 1
838 # if comment[0] - line > 1:
763 if comments is None: 764 assert hasattr(node, 'comment') 765 return node
771 if nc is not None: 772 assert val is None or val == nc 773 comments[idx] = nc
929 node_item.comment = [None, item_comment[1]] 930 assert getattr(node_item.value[0][0], 'comment', None) is None 931 node_item.value[0][0].comment = [item_comment[0], None]
989 if item_comment: 990 assert getattr(node_key, 'comment', None) is None 991 node_key.comment = item_comment[:2]
334 # assume string
335 assert isinstance(version, str)
336 return tuple(map(int, version.split('.')))
1204 for x in trailing: 1205 assert x[-1] == '\n' 1206 self.comments.add_blank_line(x, 0, line) # type: ignore
2054 if comment.count('\n') == 1:
2055 assert comment[-1] == '\n'
2056 else:
2056 else: 2057 assert '\n' not in comment 2058 self.comments[line] = retval = EOLComment(comment[:-1], line, column)
2063 # info = inspect.getframeinfo(inspect.stack()[1][0])
2064 assert comment.count('\n') == 1 and comment[-1] == '\n'
2065 assert line not in self.comments
2064 assert comment.count('\n') == 1 and comment[-1] == '\n'
2065 assert line not in self.comments
2066 self.comments[line] = retval = BlankLineComment(comment[:-1], line, column)
2071 # info = inspect.getframeinfo(inspect.stack()[1][0])
2072 assert comment.count('\n') == 1 and comment[-1] == '\n'
2073 # if comment.startswith('# C12'):
2216 super().__init__(*arg, **kw) 2217 assert self.loader is not None 2218 # comments isinitialised on .need_more_tokens and persist on
2325 blmark = self.reader.get_mark() 2326 assert blmark.column == 0 2327 blanks = ""
125 anchor = node.anchor.value 126 except: # NOQA 127 pass 128 self.anchors[node] = anchor
63 return self._trval 64 assert self._transform_type is not None 65 if not self._transform_type:
119 """ 120 assert self._transform_type is None 121 self._transform_type = val
31 arguments.append('line: ' + str(self.start_mark.line))
32 except: # NOQA
33 pass
34 try:
35 arguments.append('comment: ' + str(self._comment))
36 except: # NOQA
37 pass
38 return f'{self.__class__.__name__}({", ".join(arguments)})'
62 else: 63 assert len(self._comment) in [2, 5] # make sure it is version 0 64 # if isinstance(comment, CommentToken):
73 else: 74 assert len(self._comment) == 2 # make sure it is version 0 75 assert self._comment[1] is None
74 assert len(self._comment) == 2 # make sure it is version 0 75 assert self._comment[1] is None 76 self._comment[1] = comments
83 else: 84 assert len(self._comment) == 3 85 if self._comment[0] is None:
92 else: 93 assert len(self._comment) == 3 94 assert self._comment[1] is None
93 assert len(self._comment) == 3 94 assert self._comment[1] is None 95 if self.comment[1] is None:
104 else: 105 assert len(self._comment) == 3 106 if self._comment[2] is None:
341 if start_mark is None: 342 assert column is not None 343 self._column = column
365 v += ', line: ' + str(self.start_mark.line) 366 except: # NOQA 367 pass 368 try:
369 v += ', col: ' + str(self.start_mark.column)
370 except: # NOQA
371 pass
372 return f'CommentToken({v})'
205 206 assert isinstance(cfg, ConfigObj) 207 for c in cfg.initial_comment:
220 221 assert isinstance(s, Section) 222 indent = ' ' * level
470 headers = get_meta_http_headers()
471 r = requests.get(
472 "https://pypi.org/pypi/{name}/{version}/json".format(
473 name=self.key, version=version
474 ),
475 headers=headers,
476 )
477 hashes = []
153 p = Path(__file__).parent / "templates"
154 env = jinja2.Environment(loader=jinja2.FileSystemLoader(Path(p))) # type: ignore
155 template = env.get_template("pr.jinja2")
203 p = Path(__file__).parent / "templates"
204 env = jinja2.Environment(loader=jinja2.FileSystemLoader(Path(p))) # type: ignore
205 template = env.get_template("issue.jinja2")
261 """
262 return hashlib.sha1(
263 b"blob " + str(len(raw_contents)).encode("ascii") + b"\0" + raw_contents
264 ).hexdigest()
265
300
301 r = requests.get(
302 "https://pyup.io/api/v1/changelogs/{}/".format(package), headers=headers
303 )
304
72 73 import subprocess 74
279 timeout = proxy["timeout"] 280 except Exception: 281 pass 282
1 import logging 2 import subprocess 3 import sys
959 try: 960 branch_name = subprocess.check_output( 961 ["git", "rev-parse", "--abbrev-ref", "HEAD"], 962 stderr=subprocess.DEVNULL, 963 text=True, 964 ).strip() 965 return branch_name if branch_name else None
959 try: 960 branch_name = subprocess.check_output( 961 ["git", "rev-parse", "--abbrev-ref", "HEAD"], 962 stderr=subprocess.DEVNULL, 963 text=True, 964 ).strip() 965 return branch_name if branch_name else None
5 import shutil 6 import subprocess 7 import sys
154 args = [command_path, "--version"] 155 result = subprocess.run(args, capture_output=True, text=True, env=get_env()) 156
1 from dataclasses import dataclass 2 from subprocess import CompletedProcess 3
172 allowed.append(version) 173 except Exception: 174 pass 175
1250 file_loader = PackageLoader('safety', 'templates')
1251 env = Environment(loader=file_loader)
1252 template = env.get_template(template)
19 console.clear() 20 for __ in range(random.randint(50, 100)): # number of confetti pieces 21 x = random.randint(0, max(0, width - 1))
20 for __ in range(random.randint(50, 100)): # number of confetti pieces 21 x = random.randint(0, max(0, width - 1)) 22 y = random.randint(0, max(0, height - 2))
21 x = random.randint(0, max(0, width - 1)) 22 y = random.randint(0, max(0, height - 2)) 23 char = random.choice(chars)
22 y = random.randint(0, max(0, height - 2)) 23 char = random.choice(chars) 24 color = random.choice(["red", "green", "yellow", "blue", "magenta", "cyan"])
23 char = random.choice(chars) 24 color = random.choice(["red", "green", "yellow", "blue", "magenta", "cyan"]) 25 console.print(
61 console.clear() 62 sparkles = random.choice( 63 [":sparkles:", ":glowing_star:", ":dizzy:", ":party_popper:"] 64 ) 65 console.print(trophy, style="bold yellow")
81 console.clear() 82 for __ in range(random.randint(5, 10)): # Balloons per frame 83 x = random.randint(0, width - 1)
82 for __ in range(random.randint(5, 10)): # Balloons per frame 83 x = random.randint(0, width - 1) 84 balloon = random.choice(balloons)
83 x = random.randint(0, width - 1) 84 balloon = random.choice(balloons) 85 console.print(
87 )
88 console.file.write(f"\x1b[{random.randint(1, 10)};{x}H")
89 console.file.flush()
111 console.clear() 112 line = " ".join(random.choices(parade, k=width // 2)) 113 console.print(line, style="bold green", justify="center")
128 for __ in range(100): # Confetti pieces per frame 129 x = random.randint(0, width - 1) 130 char = random.choice(["*", "+", "~", ":sparkles:", "o"])
129 x = random.randint(0, width - 1) 130 char = random.choice(["*", "+", "~", ":sparkles:", "o"]) 131 color = random.choice(colors)
130 char = random.choice(["*", "+", "~", ":sparkles:", "o"]) 131 color = random.choice(colors) 132 console.print(
134 )
135 console.file.write(f"\x1b[{random.randint(1, 10)};{x}H")
136 console.file.flush()
156 for _ in range(15): # Number of fireworks 157 x = random.randint(5, width - 5) 158 y = random.randint(2, 8)
157 x = random.randint(5, width - 5) 158 y = random.randint(2, 8) 159 firework = random.choice(fireworks)
158 y = random.randint(2, 8) 159 firework = random.choice(fireworks) 160 color = random.choice(["red", "yellow", "green", "blue", "magenta"])
159 firework = random.choice(fireworks) 160 color = random.choice(["red", "yellow", "green", "blue", "magenta"]) 161 console.print(
180 console.clear() 181 start_x = random.randint(0, width // 2) 182 trail = "".join(random.choices(stars, k=10))
181 start_x = random.randint(0, width // 2)
182 trail = "".join(random.choices(stars, k=10))
183 console.print(f"{' ' * start_x}{trail}", style="bold yellow", justify="left")
201 width = console.size.width 202 wave = [random.choice(emojis) for _ in range(width)] 203
217 # Move contestants forward by random increments 218 pos1 += random.randint(1, 3) 219 pos2 += random.randint(1, 3)
218 pos1 += random.randint(1, 3) 219 pos2 += random.randint(1, 3) 220
271 if egg_mode == "ascii": 272 art = random.choice(ASCII_ARTS["ascii"]) 273 console.print(art, style="green")
275 elif egg_mode == "fx": 276 effect = random.choice(CELEBRATION_EFFECTS) 277 effect(console) # Run the randomly selected effect
282 elif egg_mode == "fortune":
283 fortune_message = random.choice(FORTUNES)
284 console.print(f"\n[italic cyan]{fortune_message}[/italic cyan]\n")
4 from pathlib import Path 5 import subprocess 6 from typing import TYPE_CHECKING, Optional, Tuple
59 60 token = "token" 61 api_key = "api_key"
129 return (
130 subprocess.run(
131 self.git + cmd, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL
132 )
133 .stdout.decode("utf-8")
206 return raw_describe.split("-dirty")[0]
207 except Exception:
208 pass
209
5 import shutil 6 import subprocess 7 import time
221 222 subprocess.Popen(CMD, **kwargs) 223 except Exception as e:
294 logger.debug(f"Running subprocess with capture_output={self._capture_output}")
295 process = subprocess.run(
296 args, capture_output=self._capture_output, env=self.env(ctx)
297 )
298 logger.debug(f"Subprocess completed with returncode: {process.returncode}")
330 331 result = subprocess.run(args, capture_output=True, env=self.env(ctx), text=True) 332 return self.parse_package_list_output(result.stdout)
137 tokens = [] 138 current_token = "" 139 i = 0
147 tokens.append(current_token) 148 current_token = "" 149
333 try: 334 import subprocess 335
337 try: 338 subprocess.run( 339 [shell, "-Command", "exit"], 340 capture_output=True, 341 text=True, 342 check=False, 343 ) 344 except FileNotFoundError:
353 ] 354 result = subprocess.run( 355 cmd, capture_output=True, text=True, check=False 356 ) 357 result_stdout = result.stdout.strip()
2 import shutil 3 import subprocess 4 from pathlib import Path
69 70 result = subprocess.run( 71 [ 72 get_unwrapped_command(name="npm"), 73 "config", 74 "set", 75 "registry", 76 repository_url, 77 "--location", 78 "project", 79 ], 80 capture_output=True, 81 cwd=project_root, 82 env=get_env(), 83 ) 84
109 ) 110 result = subprocess.run( 111 [ 112 get_unwrapped_command(name="npm"), 113 "config", 114 "set", 115 "-g", 116 "registry", 117 repository_url, 118 ], 119 capture_output=True, 120 env=get_env(), 121 ) 122
128
129 query_config_result = subprocess.run(
130 [
131 get_unwrapped_command(name="npm"),
132 "config",
133 "get",
134 "globalconfig",
135 ],
136 capture_output=True,
137 env=get_env(),
138 )
139 config_file_path = query_config_result.stdout.decode("utf-8").strip()
154 try: 155 subprocess.run( 156 [ 157 get_unwrapped_command(name="npm"), 158 "config", 159 "set", 160 "-g", 161 "registry", 162 ], 163 capture_output=True, 164 env=get_env(), 165 ) 166 except Exception:
3 import shutil 4 import subprocess 5 from pathlib import Path
92 93 result = subprocess.run( 94 [ 95 get_unwrapped_command(name="pip"), 96 "config", 97 "--user", 98 "set", 99 "global.index-url", 100 repository_url, 101 ], 102 capture_output=True, 103 env=get_env(), 104 ) 105
129 try: 130 subprocess.run( 131 [ 132 get_unwrapped_command(name="pip"), 133 "config", 134 "--user", 135 "unset", 136 "global.index-url", 137 ], 138 capture_output=True, 139 env=get_env(), 140 ) 141 except Exception:
2 import shutil 3 import subprocess 4 from pathlib import Path
86 87 result = subprocess.run( 88 [ 89 get_unwrapped_command(name="poetry"), 90 "source", 91 "add", 92 "safety", 93 repository_url, 94 ], 95 capture_output=True, 96 env=get_env(), 97 ) 98
1 import sys 2 import subprocess 3 import shutil
26 27 where_result = subprocess.run( 28 ["where.exe", lookup_term], 29 capture_output=True, 30 text=True, 31 env=get_env(), 32 ) 33
26 27 where_result = subprocess.run( 28 ["where.exe", lookup_term], 29 capture_output=True, 30 text=True, 31 env=get_env(), 32 ) 33
391 """ 392 import subprocess 393
395 return (
396 subprocess.run(
397 commandline, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL
398 )
399 .stdout.decode("utf-8")
423 result["origin"] = git_command(["git", "remote", "get-url", "origin"]) 424 except Exception: 425 pass 426
1003 ) 1004 except Exception: 1005 pass 1006 self.fail(msg.format(hint=hint), param, ctx)
1460 SYSTEM_CONFIG_DIR.mkdir(parents=True, exist_ok=True) 1461 except Exception: 1462 pass 1463
87 class AuthenticationType(str, Enum): 88 TOKEN = "token" 89 API_KEY = "api_key"
87 allowed.append(version) 88 except Exception: 89 pass 90
141 class AuthenticationMethod(str, Enum): 142 token = "token" 143 api_key = "api_key"
16 import os 17 import subprocess 18 import contextlib
82 try: 83 path = subprocess.check_output( 84 [ 85 os.path.join( 86 root, "Microsoft Visual Studio", "Installer", "vswhere.exe" 87 ), 88 "-latest", 89 "-prerelease", 90 "-requires", 91 "Microsoft.VisualStudio.Component.VC.Tools.x86.x64", 92 "-property", 93 "installationPath", 94 "-products", 95 "*", 96 ], 97 encoding="mbcs", 98 errors="strict", 99 ).strip() 100 except (subprocess.CalledProcessError, OSError, UnicodeDecodeError):
145 try:
146 out = subprocess.check_output(
147 f'cmd /u /c "{vcvarsall}" {plat_spec} && set',
148 stderr=subprocess.STDOUT,
149 ).decode('utf-16le', errors='replace')
150 except subprocess.CalledProcessError as exc:
240 # multi-init means we would need to check platform same each time... 241 assert not self.initialized, "don't init multiple times" 242 if plat_name is None:
344 objects = self.object_filenames(sources, strip_dir=0, output_dir=outdir) 345 assert len(objects) == len(sources) 346
411 objects = self.object_filenames(sources, output_dir=output_dir) 412 assert len(objects) == len(sources) 413
960 def shared_object_filename(self, basename, strip_dir=0, output_dir=''): 961 assert output_dir is not None 962 if strip_dir:
966 def executable_filename(self, basename, strip_dir=0, output_dir=''): 967 assert output_dir is not None 968 if strip_dir:
974 ): 975 assert output_dir is not None 976 expected = '"static", "shared", "dylib", "xcode_stub"'
976 expected = '"static", "shared", "dylib", "xcode_stub"'
977 if lib_type not in eval(expected):
978 raise ValueError(f"'lib_type' must be {expected}")
8 import functools 9 import subprocess 10 import sysconfig
44 return {}
45 homebrew_prefix = subprocess.check_output(['brew', '--prefix'], text=True).strip()
46 return locals()
44 return {}
45 homebrew_prefix = subprocess.check_output(['brew', '--prefix'], text=True).strip()
46 return locals()
5 6 import subprocess 7 import sys
361 362 out = os.popen(q_cmd) 363 try:
370 ell = line.strip().split() 371 assert len(ell) == 2 372 binary_rpms.append(ell[1])
393 srpm = os.path.join(rpm_dir['SRPMS'], source_rpm) 394 assert os.path.exists(srpm) 395 self.move_file(srpm, self.dist_dir)
66 self.optimize = int(self.optimize) 67 assert 0 <= self.optimize <= 2 68 except (ValueError, AssertionError):
378 for (package_, module, module_file) in modules: 379 assert package == package_ 380 self.build_module(module, module_file, package)
97 url = self.repository + '?:action=list_classifiers' 98 response = urllib.request.urlopen(url) 99 log.info(self._read_pypi_response(response))
142 choice = 'x' 143 username = password = '' 144
206 data = {':action': 'user'}
207 data['name'] = data['password'] = data['email'] = ''
208 data['confirm'] = None
216 if data['password'] != data['confirm']: 217 data['password'] = '' 218 data['confirm'] = None
42 self.username = '' 43 self.password = '' 44 self.show_response = 0
185 try: 186 result = urlopen(request) 187 status = result.getcode()
266 code = f.read().replace(r'\r\n', r'\n') 267 exec(code, g) 268 finally:
13 import warnings 14 from subprocess import check_output 15
355 '''Try to determine if the compiler that would be used is from cygwin.''' 356 out_string = check_output(shlex.split(cc) + ['-dumpmachine']) 357 return out_string.strip().endswith(b'cygwin')
206 self.data_files = None 207 self.password = '' 208
114 def _check_alias_dict(self, aliases, what): 115 assert isinstance(aliases, dict) 116 for (alias, opt) in aliases.items():
253 else: 254 assert len(opt) > 2 and opt[:2] == '--' 255 opt = opt[2:]
261 if not self.takes_arg[opt]: # boolean option? 262 assert val == '', "boolean option can't have value" 263 alias = self.negative_alias.get(opt)
353 pattern_re = glob_to_re(pattern) 354 assert pattern_re.startswith(start) and pattern_re.endswith(end) 355 else:
359 prefix_re = glob_to_re(prefix) 360 assert prefix_re.startswith(start) and prefix_re.endswith(end) 361 prefix_re = prefix_re[len(start) : len(prefix_re) - len(end)]
15 import os 16 import subprocess 17 import sys
282 log.debug("Calling 'vcvarsall.bat %s' (version=%s)", arch, version)
283 popen = subprocess.Popen(
284 '"{}" {} & set'.format(vcvarsall, arch),
285 stdout=subprocess.PIPE,
286 stderr=subprocess.PIPE,
287 )
288 try:
362 # multi-init means we would need to check platform same each time... 363 assert not self.initialized, "don't init multiple times" 364 if self.__version < 8.0:
10 import os 11 import subprocess 12
56 try: 57 proc = subprocess.Popen(cmd, env=env) 58 proc.wait()
10 import string 11 import subprocess 12 import sys
421
422 (script_fd, script_name) = None, mktemp(".py")
423 log.info("writing byte-compilation script '%s'", script_name)
217 else: 218 assert False, "never get here" 219
1 import os 2 import subprocess 3 import contextlib
30 if runner is None: 31 runner = functools.partial(subprocess.check_call, shell=True) 32 # In the tar command, use --strip-components=1 to strip the first path and
86 stdout = devnull if quiet else None 87 subprocess.check_call(cmd, stdout=stdout) 88 yield repo_dir
518 try: 519 return eval(use) 520 except TypeError:
3287 # Largest order statistic: https://en.wikipedia.org/wiki/Order_statistic 3288 W = exp(log(random()) / k) 3289
3291 # number with a geometric distribution. Sample it using random() and logs. 3292 next_index = k + floor(log(random()) / log(1 - W)) 3293
3296 if index == next_index: 3297 reservoir[randrange(k)] = element 3298 # The new W is the largest in a sample of k U(0, `old_W`) numbers
3298 # The new W is the largest in a sample of k U(0, `old_W`) numbers 3299 W *= exp(log(random()) / k) 3300 next_index += floor(log(random()) / log(1 - W)) + 1
3299 W *= exp(log(random()) / k) 3300 next_index += floor(log(random()) / log(1 - W)) + 1 3301
3309 # Log-transform for numerical stability for weights that are small/large 3310 weight_keys = (log(random()) / weight for weight in weights) 3311
3319 smallest_weight_key, _ = reservoir[0] 3320 weights_to_skip = log(random()) / smallest_weight_key 3321
3327 t_w = exp(weight * smallest_weight_key) 3328 r_2 = uniform(t_w, 1) # generate U(t_w, 1) 3329 weight_key = log(r_2) / weight
3331 smallest_weight_key, _ = reservoir[0] 3332 weights_to_skip = log(random()) / smallest_weight_key 3333 else:
491 pools = [tuple(pool) for pool in args] * repeat 492 return tuple(choice(pool) for pool in pools) 493
541 n = len(pool) 542 indices = sorted(randrange(n) for i in range(r)) 543 return tuple(pool[i] for i in indices)
145 version_string = os.confstr("CS_GNU_LIBC_VERSION")
146 assert version_string is not None
147 _, version = version_string.split()
12 import struct 13 import subprocess 14 import sys
105 return None 106 proc = subprocess.run([ld], stderr=subprocess.PIPE, universal_newlines=True) 107 return _parse_musl_version(proc.stderr)
129 plat = sysconfig.get_platform()
130 assert plat.startswith("linux-"), "not linux"
131
151 152 assert isinstance(marker, (list, tuple, str)) 153
225 for marker in markers: 226 assert isinstance(marker, (list, tuple, str)) 227
241 else: 242 assert marker in ["and", "or"] 243 if marker == "or":
472 """This function exists for compatibility with old typing versions.""" 473 assert isinstance(cls, GenericMeta) 474 if hasattr(cls, '_gorg'):
714 # includes "Protocol" special treatment. (Comments removed for brevity.) 715 assert extra is None # Protocols should not have extra 716 if tvars is not None:
716 if tvars is not None: 717 assert origin is not None 718 assert all(isinstance(t, typing.TypeVar) for t in tvars), tvars
717 assert origin is not None 718 assert all(isinstance(t, typing.TypeVar) for t in tvars), tvars 719 else:
1149 def copy_with(self, params): 1150 assert len(params) == 1 1151 new_type = params[0]
334 335 exec(code, locals()) 336
360 if len(candidates) != 0 or len(dirs) != 1:
361 assert len(candidates) == 1, f"Multiple {suffix} directories found"
362 return Path(parent, candidates[0])
423 dist_info_candidates = list(Path(metadata_directory).glob("*.dist-info"))
424 assert len(dist_info_candidates) <= 1
425 return str(dist_info_candidates[0]) if dist_info_candidates else None
376 f.read(skip) 377 code = marshal.load(f) 378 f.close()
372 373 assert output_dir is None # distutils build_ext doesn't pass this 374 output_dir, filename = os.path.split(output_libname)
97 egg_info_dir = self.egg_info.egg_info 98 assert os.path.isdir(egg_info_dir), ".egg-info dir should have been created" 99
37 import contextlib 38 import subprocess 39 import shlex
445 except Exception: 446 pid = random.randint(0, sys.maxsize) 447 return os.path.join(self.install_dir, "test-easy-install-%s" % pid)
1067 wheel = Wheel(wheel_path) 1068 assert wheel.is_compatible() 1069 destination = os.path.join(self.install_dir, wheel.egg_name())
163 else:
164 assert str(self.dist_info_dir).endswith(".dist-info")
165 assert Path(self.dist_info_dir, "METADATA").exists()
164 assert str(self.dist_info_dir).endswith(".dist-info")
165 assert Path(self.dist_info_dir, "METADATA").exists()
166
730 safe = re.sub(r'\W|^(?=\d)', '_', name) 731 assert safe.isidentifier() 732 return safe
90 ): 91 assert preserve_mode and preserve_times and not preserve_symlinks 92 exclude = self.get_exclusions()
96 del os.link 97 except Exception: 98 pass 99 try:
176 urllib.parse.urlparse(self.repository) 177 assert not params and not query and not fragments 178 if schema == 'http':
119 context = ssl.create_default_context() 120 with urlopen(url, context=context) as response: 121 headers = Message()
396 optional_dependencies_map = self.dynamic_cfg["optional-dependencies"]
397 assert isinstance(optional_dependencies_map, dict)
398 return {
111 f.read(8) # skip magic & date 112 code = marshal.load(f) 113 elif kind == PY_FROZEN:
242 ep = metadata.EntryPoint(value=value, name=None, group=None) 243 assert not ep.extras 244 except (TypeError, ValueError, AttributeError, AssertionError) as e:
254 # or single-use iterables 255 assert isinstance(value, (list, tuple)) 256 # verify that elements of value are strings
256 # verify that elements of value are strings 257 assert ''.join(value) != value 258 except (TypeError, ValueError, AttributeError, AssertionError) as e:
18 return True 19 except Exception: 20 pass 21 return False
43 s = next(it) # skip empty string 44 assert not s 45 return it
113 def glob2(dirname, pattern): 114 assert _isrecursive(pattern) 115 yield pattern[:0]
2 import os 3 import subprocess 4 import sys
18 return find_links.split() 19 assert isinstance(find_links, (tuple, list)) 20 return find_links
81 try: 82 subprocess.check_call(cmd) 83 except subprocess.CalledProcessError as e:
31 code = compile(norm_script, script_name, 'exec') 32 exec(code, namespace) 33
21 import itertools 22 import subprocess 23 import distutils.errors
90 try: 91 path = subprocess.check_output([ 92 join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe"), 93 "-latest", 94 "-prerelease", 95 "-requiresAny", 96 "-requires", "Microsoft.VisualStudio.Component.VC.Tools.x86.x64", 97 "-requires", "Microsoft.VisualStudio.Workload.WDExpress", 98 "-property", "installationPath", 99 "-products", "*", 100 ]).decode(encoding="mbcs", errors="strict").strip() 101 except (subprocess.CalledProcessError, OSError, UnicodeDecodeError):
172 try:
173 out = subprocess.check_output(
174 'cmd /u /c "{}" {} && set'.format(vcvarsall, plat_spec),
175 stderr=subprocess.STDOUT,
176 ).decode('utf-16le', errors='replace')
177 except subprocess.CalledProcessError as exc:
859 self.info("Doing subversion checkout from %s to %s", url, filename)
860 os.system("svn checkout%s -q %s %s" % (creds, url, filename))
861 return filename
885 self.info("Doing git clone from %s to %s", url, filename)
886 os.system("git clone --quiet %s %s" % (url, filename))
887
889 self.info("Checking out %s", rev)
890 os.system("git -C %s checkout --quiet %s" % (
891 filename,
892 rev,
893 ))
894
901 self.info("Doing hg clone from %s to %s", url, filename)
902 os.system("hg clone --quiet %s %s" % (url, filename))
903
905 self.info("Updating to %s", rev)
906 os.system("hg --cwd %s up -C -r %s -q" % (
907 filename,
908 rev,
909 ))
910
1102 if os.path.isfile(filename):
1103 return urllib.request.urlopen(url)
1104 elif path.endswith('/') and os.path.isdir(filename):
8 import contextlib 9 import pickle 10 import textwrap
45 code = compile(script, filename, 'exec') 46 exec(code, globals, locals) 47
49 for dirpath, dirnames, filenames in os.walk(src_dir, topdown=True): 50 assert not filenames 51 os.rmdir(dirpath)
1 import errno 2 import subprocess 3 import sys
15 cmd = ["ps", "-ww", "-o", "pid=", "-o", "ppid=", "-o", "args="] 16 output = subprocess.check_output(cmd) 17 except OSError as e: # Python 2-compatible FileNotFoundError.
55 c = self._c 56 assert c.is_valid() 57 if i < 0:
67 # should be the default 68 assert c.is_valid() 69 if i < 0:
85 c.use_region(ofs, l) 86 assert c.is_valid() 87 d = c.buffer()[:l]
354 if a: 355 assert len(a) == 1 356 r = a[0]
378 379 assert r.includes_ofs(offset) 380 return r
18 def __init__(self, size, prefix=''): 19 assert size, "Require size to be larger 0" 20
27 28 assert os.path.getsize(self.path) == size 29
33 buf = SlidingWindowMapBuffer() # can create uninitialized buffers 34 assert buf.cursor() is None 35
38 buf.end_access() 39 assert len(buf) == 0 40
42 offset = 100 43 assert buf.begin_access(c, fc.size) == False 44 assert buf.begin_access(c, offset) == True
43 assert buf.begin_access(c, fc.size) == False 44 assert buf.begin_access(c, offset) == True 45 assert len(buf) == fc.size - offset
44 assert buf.begin_access(c, offset) == True 45 assert len(buf) == fc.size - offset 46 assert buf.cursor().is_valid()
45 assert len(buf) == fc.size - offset 46 assert buf.cursor().is_valid() 47
48 # empty begin access keeps it valid on the same path, but alters the offset 49 assert buf.begin_access() == True 50 assert len(buf) == fc.size
49 assert buf.begin_access() == True 50 assert len(buf) == fc.size 51 assert buf.cursor().is_valid()
50 assert len(buf) == fc.size 51 assert buf.cursor().is_valid() 52
55 data = fp.read() 56 assert data[offset] == buf[0] 57 assert data[offset:offset * 2] == buf[0:offset]
56 assert data[offset] == buf[0] 57 assert data[offset:offset * 2] == buf[0:offset] 58
59 # negative indices, partial slices 60 assert buf[-1] == buf[len(buf) - 1] 61 assert buf[-10:] == buf[len(buf) - 10:len(buf)]
60 assert buf[-1] == buf[len(buf) - 1] 61 assert buf[-10:] == buf[len(buf) - 10:len(buf)] 62
64 buf.end_access() 65 assert not buf.cursor().is_valid() 66 assert buf.cursor().is_associated() # but it remains associated
65 assert not buf.cursor().is_valid() 66 assert buf.cursor().is_associated() # but it remains associated 67
68 # an empty begin access fixes it up again 69 assert buf.begin_access() == True and buf.cursor().is_valid() 70 del(buf) # ends access automatically
72 73 assert man_optimal.num_file_handles() == 1 74
86 buf = SlidingWindowMapBuffer(manager.make_cursor(item)) 87 assert manager.num_file_handles() == 1 88 for access_mode in range(2): # single, multi
97 if access_mode: # multi 98 ofs_start = randint(0, fsize) 99 ofs_end = randint(ofs_start, fsize)
98 ofs_start = randint(0, fsize) 99 ofs_end = randint(ofs_start, fsize) 100 d = buf[ofs_start:ofs_end]
100 d = buf[ofs_start:ofs_end] 101 assert len(d) == ofs_end - ofs_start 102 assert d == data[ofs_start:ofs_end]
101 assert len(d) == ofs_end - ofs_start 102 assert d == data[ofs_start:ofs_end] 103 num_bytes += len(d)
105 else: 106 pos = randint(0, fsize) 107 assert buf[pos] == data[pos]
106 pos = randint(0, fsize) 107 assert buf[pos] == data[pos] 108 num_bytes += 1
112 buf.end_access() 113 assert manager.num_file_handles() 114 assert manager.collect()
113 assert manager.num_file_handles() 114 assert manager.collect() 115 assert manager.num_file_handles() == 0
114 assert manager.collect() 115 assert manager.num_file_handles() == 0 116 elapsed = max(time() - st, 0.001) # prevent zero division errors on windows
22 ci = WindowCursor(man) # invalid cursor 23 assert not ci.is_valid() 24 assert not ci.is_associated()
23 assert not ci.is_valid() 24 assert not ci.is_associated() 25 assert ci.size() == 0 # this is cached, so we can query it in invalid state
24 assert not ci.is_associated() 25 assert ci.size() == 0 # this is cached, so we can query it in invalid state 26
27 cv = man.make_cursor(fc.path) 28 assert not cv.is_valid() # no region mapped yet 29 assert cv.is_associated() # but it know where to map it from
28 assert not cv.is_valid() # no region mapped yet 29 assert cv.is_associated() # but it know where to map it from 30 assert cv.file_size() == fc.size
29 assert cv.is_associated() # but it know where to map it from 30 assert cv.file_size() == fc.size 31 assert cv.path() == fc.path
30 assert cv.file_size() == fc.size 31 assert cv.path() == fc.path 32
34 cio = copy(cv) 35 assert not cio.is_valid() and cio.is_associated() 36
37 # assign method 38 assert not ci.is_associated() 39 ci.assign(cv)
39 ci.assign(cv) 40 assert not ci.is_valid() and ci.is_associated() 41
54 for man in (static_man, slide_man): 55 assert man.num_file_handles() == 0 56 assert man.num_open_files() == 0
55 assert man.num_file_handles() == 0 56 assert man.num_open_files() == 0 57 winsize_cmp_val = 0
60 # END handle window size 61 assert man.window_size() > winsize_cmp_val 62 assert man.mapped_memory_size() == 0
61 assert man.window_size() > winsize_cmp_val 62 assert man.mapped_memory_size() == 0 63 assert man.max_mapped_memory_size() > 0
62 assert man.mapped_memory_size() == 0 63 assert man.max_mapped_memory_size() > 0 64
70 # doesn't fail if we over-allocate 71 assert man._collect_lru_region(sys.maxsize) == 0 72
78 c = man.make_cursor(item) 79 assert c.path_or_fd() is item 80 assert c.use_region(10, 10).is_valid()
79 assert c.path_or_fd() is item 80 assert c.use_region(10, 10).is_valid() 81 assert c.ofs_begin() == 10
80 assert c.use_region(10, 10).is_valid() 81 assert c.ofs_begin() == 10 82 assert c.size() == 10
81 assert c.ofs_begin() == 10 82 assert c.size() == 10 83 with open(fc.path, 'rb') as fp:
83 with open(fc.path, 'rb') as fp: 84 assert c.buffer()[:] == fp.read(20)[10:] 85
107 for item in (fc.path, fd): 108 assert len(data) == fc.size 109
114 # still empty (more about that is tested in test_memory_manager() 115 assert man.num_open_files() == 0 116 assert man.mapped_memory_size() == 0
115 assert man.num_open_files() == 0 116 assert man.mapped_memory_size() == 0 117
120 size = man.window_size() // 2 121 assert c.use_region(base_offset, size).is_valid() 122 rr = c.region()
122 rr = c.region() 123 assert rr.client_count() == 2 # the manager and the cursor and us 124
124 125 assert man.num_open_files() == 1 126 assert man.num_file_handles() == 1
125 assert man.num_open_files() == 1 126 assert man.num_file_handles() == 1 127 assert man.mapped_memory_size() == rr.size()
126 assert man.num_file_handles() == 1 127 assert man.mapped_memory_size() == rr.size() 128
129 # assert c.size() == size # the cursor may overallocate in its static version 130 assert c.ofs_begin() == base_offset 131 assert rr.ofs_begin() == 0 # it was aligned and expanded
130 assert c.ofs_begin() == base_offset 131 assert rr.ofs_begin() == 0 # it was aligned and expanded 132 if man.window_size():
133 # but isn't larger than the max window (aligned) 134 assert rr.size() == align_to_mmap(man.window_size(), True) 135 else:
135 else: 136 assert rr.size() == fc.size 137 # END ignore static managers which dont use windows and are aligned to file boundaries
138 139 assert c.buffer()[:] == data[base_offset:base_offset + (size or c.size())] 140
142 nsize = (size or fc.size) - 10 143 assert c.use_region(0, nsize).is_valid() 144 assert c.region() == rr
143 assert c.use_region(0, nsize).is_valid() 144 assert c.region() == rr 145 assert man.num_file_handles() == 1
144 assert c.region() == rr 145 assert man.num_file_handles() == 1 146 assert c.size() == nsize
145 assert man.num_file_handles() == 1 146 assert c.size() == nsize 147 assert c.ofs_begin() == 0
146 assert c.size() == nsize 147 assert c.ofs_begin() == 0 148 assert c.buffer()[:] == data[:nsize]
147 assert c.ofs_begin() == 0 148 assert c.buffer()[:] == data[:nsize] 149
152 base_offset = fc.size - (size or c.size()) + overshoot 153 assert c.use_region(base_offset, size).is_valid() 154 if man.window_size():
154 if man.window_size(): 155 assert man.num_file_handles() == 2 156 assert c.size() < size
155 assert man.num_file_handles() == 2 156 assert c.size() < size 157 assert c.region() is not rr # old region is still available, but has not cursor ref anymore
156 assert c.size() < size 157 assert c.region() is not rr # old region is still available, but has not cursor ref anymore 158 assert rr.client_count() == 1 # only held by manager
157 assert c.region() is not rr # old region is still available, but has not cursor ref anymore 158 assert rr.client_count() == 1 # only held by manager 159 else:
159 else: 160 assert c.size() < fc.size 161 # END ignore static managers which only have one handle per file
162 rr = c.region() 163 assert rr.client_count() == 2 # manager + cursor 164 assert rr.ofs_begin() < c.ofs_begin() # it should have extended itself to the left
163 assert rr.client_count() == 2 # manager + cursor 164 assert rr.ofs_begin() < c.ofs_begin() # it should have extended itself to the left 165 assert rr.ofs_end() <= fc.size # it cannot be larger than the file
164 assert rr.ofs_begin() < c.ofs_begin() # it should have extended itself to the left 165 assert rr.ofs_end() <= fc.size # it cannot be larger than the file 166 assert c.buffer()[:] == data[base_offset:base_offset + (size or c.size())]
165 assert rr.ofs_end() <= fc.size # it cannot be larger than the file 166 assert c.buffer()[:] == data[base_offset:base_offset + (size or c.size())] 167
169 c.unuse_region() 170 assert not c.is_valid() 171 if man.window_size():
173 # remove mapped regions if we have to 174 assert man.num_file_handles() == 2 175 # END ignore this for static managers
191 num_random_accesses -= 1 192 base_offset = randint(0, fc.size - 1) 193
195 if man.window_size(): 196 assert max_mapped_memory_size >= mapped_memory_size() 197 # END statistics will overshoot, which is fine
197 # END statistics will overshoot, which is fine 198 assert max_file_handles >= num_file_handles() 199 assert c.use_region(base_offset, (size or c.size())).is_valid()
198 assert max_file_handles >= num_file_handles() 199 assert c.use_region(base_offset, (size or c.size())).is_valid() 200 csize = c.size()
200 csize = c.size() 201 assert c.buffer()[:] == data[base_offset:base_offset + csize] 202 memory_read += csize
203 204 assert includes_ofs(base_offset) 205 assert includes_ofs(base_offset + csize - 1)
204 assert includes_ofs(base_offset) 205 assert includes_ofs(base_offset + csize - 1) 206 assert not includes_ofs(base_offset + csize)
205 assert includes_ofs(base_offset + csize - 1) 206 assert not includes_ofs(base_offset + csize) 207 # END while we should do an access
214 # an offset as large as the size doesn't work ! 215 assert not c.use_region(fc.size, size).is_valid() 216
217 # collection - it should be able to collect all 218 assert man.num_file_handles() 219 assert man.collect()
218 assert man.num_file_handles() 219 assert man.collect() 220 assert man.num_file_handles() == 0
219 assert man.collect() 220 assert man.num_file_handles() == 0 221 # END for each item
15 # like the amount of open file handles or the amount of mapped memory 16 assert mman.num_file_handles() == 0 17 assert mman.mapped_memory_size() == 0
16 assert mman.num_file_handles() == 0 17 assert mman.mapped_memory_size() == 0 18 # and many more ...
27 # the cursor is now associated with the file, but not yet usable 28 assert c.is_associated() 29 assert not c.is_valid()
28 assert c.is_associated() 29 assert not c.is_valid() 30
34 # To be sure your region could be mapped, query for validity 35 assert c.use_region().is_valid() # use_region returns self 36
38 # to assure you don't try to access its buffer out of its bounds 39 assert c.size() 40 c.buffer()[0] # first byte
45 # in the cursor's data. 46 assert c.ofs_begin() < c.ofs_end() 47 assert c.includes_ofs(100)
46 assert c.ofs_begin() < c.ofs_end() 47 assert c.includes_ofs(100) 48
50 # cursor will be come invalid. It cannot be used in that state 51 assert not c.use_region(fc.size, 100).is_valid() 52 # map as much as possible after skipping the first 100 bytes
52 # map as much as possible after skipping the first 100 bytes 53 assert c.use_region(100).is_valid() 54
56 c.unuse_region() 57 assert not c.is_valid() 58
64 # you can use it right away 65 assert buf.cursor().is_valid() 66
73 buf.end_access() 74 assert not buf.cursor().is_valid() # you cannot use the buffer anymore 75 assert buf.begin_access(offset=10) # start using the buffer at an offset
74 assert not buf.cursor().is_valid() # you cannot use the buffer anymore 75 assert buf.begin_access(offset=10) # start using the buffer at an offset
23 24 assert wl.ofs_end() == 1 25 assert wc.ofs_end() == 2
24 assert wl.ofs_end() == 1 25 assert wc.ofs_end() == 2 26 assert wr.ofs_end() == 8050
25 assert wc.ofs_end() == 2 26 assert wr.ofs_end() == 8050 27
30 wc.extend_left_to(wl, maxsize) 31 assert wc.ofs == 1 and wc.size == 1 32 wl.extend_right_to(wc, maxsize)
33 wl.extend_right_to(wc, maxsize) 34 assert wl.ofs == 0 and wl.size == 1 35
38 wc2.extend_left_to(wc, maxsize) 39 assert wc2.ofs == wc.ofs_end() and pofs_end == wc2.ofs_end() 40
42 wc.extend_right_to(wr, maxsize) 43 assert wc.ofs == 1 and wc.size == maxsize 44 wc.extend_right_to(wr, maxsize)
44 wc.extend_right_to(wr, maxsize) 45 assert wc.ofs == 1 and wc.size == maxsize 46
48 wc.extend_right_to(wr, sys.maxsize) 49 assert wc.ofs_end() == wr.ofs and wc.ofs == 1 50
53 wr.extend_left_to(wc2, maxsize) 54 assert wr.size == maxsize 55
56 wr.extend_left_to(wc2, sys.maxsize) 57 assert wr.ofs == wc2.ofs_end() 58
59 wc.align() 60 assert wc.ofs == 0 and wc.size == align_to_mmap(wc.size, True) 61
70 # offsets 71 assert rfull.ofs_begin() == 0 and rfull.size() == fc.size 72 assert rfull.ofs_end() == fc.size # if this method works, it works always
71 assert rfull.ofs_begin() == 0 and rfull.size() == fc.size 72 assert rfull.ofs_end() == fc.size # if this method works, it works always 73
73 74 assert rhalfofs.ofs_begin() == rofs and rhalfofs.size() == fc.size - rofs 75 assert rhalfsize.ofs_begin() == 0 and rhalfsize.size() == half_size
74 assert rhalfofs.ofs_begin() == rofs and rhalfofs.size() == fc.size - rofs 75 assert rhalfsize.ofs_begin() == 0 and rhalfsize.size() == half_size 76
76 77 assert rfull.includes_ofs(0) and rfull.includes_ofs(fc.size - 1) and rfull.includes_ofs(half_size) 78 assert not rfull.includes_ofs(-1) and not rfull.includes_ofs(sys.maxsize)
77 assert rfull.includes_ofs(0) and rfull.includes_ofs(fc.size - 1) and rfull.includes_ofs(half_size) 78 assert not rfull.includes_ofs(-1) and not rfull.includes_ofs(sys.maxsize) 79
80 # auto-refcount 81 assert rfull.client_count() == 1 82 rfull2 = rfull
82 rfull2 = rfull 83 assert rfull.client_count() == 1, "no auto-counting" 84
86 w = MapWindow.from_region(rfull) 87 assert w.ofs == rfull.ofs_begin() and w.ofs_end() == rfull.ofs_end() 88
95 96 assert len(ml) == 0 97 assert ml.path_or_fd() == item
96 assert len(ml) == 0 97 assert ml.path_or_fd() == item 98 assert ml.file_size() == fc.size
97 assert ml.path_or_fd() == item 98 assert ml.file_size() == fc.size 99 finally:
102 def test_util(self): 103 assert isinstance(is_64_bit(), bool) # just call it 104 assert align_to_mmap(1, False) == 0
103 assert isinstance(is_64_bit(), bool) # just call it 104 assert align_to_mmap(1, False) == 0 105 assert align_to_mmap(1, True) == ALLOCATIONGRANULARITY
104 assert align_to_mmap(1, False) == 0 105 assert align_to_mmap(1, True) == ALLOCATIONGRANULARITY
175 self._uc += ofs 176 assert self._uc > -1, "Increments must match decrements, usage counter negative: %i" % self._uc 177
17 try: 18 assert current_async_library() == "generic-lib" 19 finally:
31 try: 32 assert current_async_library() == "generic-lib" 33 finally:
48 async def this_is_asyncio(): 49 assert current_async_library() == "asyncio" 50 # Call it a second time to exercise the caching logic
50 # Call it a second time to exercise the caching logic 51 assert current_async_library() == "asyncio" 52 ran.append(True)
54 asyncio.run(this_is_asyncio()) 55 assert ran == [True] 56
74 async def this_is_curio(): 75 assert current_async_library() == "curio" 76 # Call it a second time to exercise the caching logic
76 # Call it a second time to exercise the caching logic 77 assert current_async_library() == "curio" 78 ran.append(True)
80 curio.run(this_is_curio) 81 assert ran == [True] 82
614 _list._check() 615 assert len(self) == len(_list) 616 assert all(key in self for key in _list)
615 assert len(self) == len(_list) 616 assert all(key in self for key in _list) 617
161 """ 162 assert key is None 163 self._len = 0
1618 try: 1619 assert self._load >= 4 1620 assert len(self._maxes) == len(self._lists)
1619 assert self._load >= 4 1620 assert len(self._maxes) == len(self._lists) 1621 assert self._len == sum(len(sublist) for sublist in self._lists)
1620 assert len(self._maxes) == len(self._lists) 1621 assert self._len == sum(len(sublist) for sublist in self._lists) 1622
1626 for pos in range(1, len(sublist)): 1627 assert sublist[pos - 1] <= sublist[pos] 1628
1631 for pos in range(1, len(self._lists)): 1632 assert self._lists[pos - 1][-1] <= self._lists[pos][0] 1633
1636 for pos in range(len(self._maxes)): 1637 assert self._maxes[pos] == self._lists[pos][-1] 1638
1641 double = self._load << 1 1642 assert all(len(sublist) <= double for sublist in self._lists) 1643
1648 for pos in range(0, len(self._lists) - 1): 1649 assert len(self._lists[pos]) >= half 1650
1651 if self._index: 1652 assert self._len == self._index[0] 1653 assert len(self._index) == self._offset + len(self._lists)
1652 assert self._len == self._index[0] 1653 assert len(self._index) == self._offset + len(self._lists) 1654
1658 leaf = self._index[self._offset + pos] 1659 assert leaf == len(self._lists[pos]) 1660
1665 if child >= len(self._index): 1666 assert self._index[pos] == 0 1667 elif child + 1 == len(self._index):
1667 elif child + 1 == len(self._index): 1668 assert self._index[pos] == self._index[child] 1669 else:
1670 child_sum = self._index[child] + self._index[child + 1] 1671 assert child_sum == self._index[pos] 1672 except:
2569 try: 2570 assert self._load >= 4 2571 assert len(self._maxes) == len(self._lists) == len(self._keys)
2570 assert self._load >= 4 2571 assert len(self._maxes) == len(self._lists) == len(self._keys) 2572 assert self._len == sum(len(sublist) for sublist in self._lists)
2571 assert len(self._maxes) == len(self._lists) == len(self._keys) 2572 assert self._len == sum(len(sublist) for sublist in self._lists) 2573
2577 for pos in range(1, len(sublist)): 2578 assert sublist[pos - 1] <= sublist[pos] 2579
2582 for pos in range(1, len(self._keys)): 2583 assert self._keys[pos - 1][-1] <= self._keys[pos][0] 2584
2587 for val_sublist, key_sublist in zip(self._lists, self._keys): 2588 assert len(val_sublist) == len(key_sublist) 2589 for val, key in zip(val_sublist, key_sublist):
2589 for val, key in zip(val_sublist, key_sublist): 2590 assert self._key(val) == key 2591
2594 for pos in range(len(self._maxes)): 2595 assert self._maxes[pos] == self._keys[pos][-1] 2596
2599 double = self._load << 1 2600 assert all(len(sublist) <= double for sublist in self._lists) 2601
2606 for pos in range(0, len(self._lists) - 1): 2607 assert len(self._lists[pos]) >= half 2608
2609 if self._index: 2610 assert self._len == self._index[0] 2611 assert len(self._index) == self._offset + len(self._lists)
2610 assert self._len == self._index[0] 2611 assert len(self._index) == self._offset + len(self._lists) 2612
2616 leaf = self._index[self._offset + pos] 2617 assert leaf == len(self._lists[pos]) 2618
2623 if child >= len(self._index): 2624 assert self._index[pos] == 0 2625 elif child + 1 == len(self._index):
2625 elif child + 1 == len(self._index): 2626 assert self._index[pos] == self._index[child] 2627 else:
2628 child_sum = self._index[child] + self._index[child + 1] 2629 assert child_sum == self._index[pos] 2630 except:
731 _list._check() 732 assert len(_set) == len(_list) 733 assert all(value in _set for value in _list)
732 assert len(_set) == len(_list) 733 assert all(value in _set for value in _list)
28 """ 29 with mock.patch.object(sys, 'executable', '/tmp/fake'): 30 sot = _cache.Cache()
77 else: 78 assert False, 'Failed to raise KeyError' 79
174 em.map(mapped, 1, 2, a='A', b='B') 175 assert False 176 except RuntimeError:
54 else: 55 assert False, 'Failed to raise KeyError'
131 # This will raise KeyError if the names don't match 132 assert (em[test_extension.name]) 133
76 return self.wait_random_min + ( 77 random.random() * (self.wait_random_max - self.wait_random_min) 78 )
199 high = super().__call__(retry_state=retry_state) 200 return random.uniform(self.min, high) 201
227 def __call__(self, retry_state: "RetryCallState") -> float: 228 jitter = random.uniform(0, self.jitter) 229 try:
152 for k in self._map: 153 assert k is not None 154 self._validate_out_of_order_table(k)
469 dt = parse_rfc3339(raw) 470 assert isinstance(dt, datetime.datetime) 471 return DateTime(
488 dt = parse_rfc3339(raw) 489 assert isinstance(dt, datetime.date) 490 date = Date(dt.year, dt.month, dt.day, trivia, raw)
501 dt = parse_rfc3339(raw + time_part) 502 assert isinstance(dt, datetime.datetime) 503 return DateTime(
520 t = parse_rfc3339(raw) 521 assert isinstance(t, datetime.time) 522 return Time(
37 if re.match(r"^\\\w+$", val):
38 return eval(f'"{val}"').encode()
39 raise TqdmTypeError(f"{val} : {typ}")
126 def create_th_lock(cls):
127 assert hasattr(cls, 'th_lock')
128 warn("create_th_lock not needed anymore", TqdmDeprecationWarning, stacklevel=2)
156 frac = max(0, min(1, frac)) 157 assert default_len > 0 158 self.frac = frac
86 overrides[k] = typ(overrides[k]) 87 except Exception: 88 pass 89 else:
325 from subprocess import check_call # nosec
326 return [int(check_call(shlex.split('tput ' + i))) - 1
327 for i in ('cols', 'lines')]
2 import re 3 import subprocess 4 from enum import Enum
117 script_content = get_completion_script( 118 prog_name=prog_name, complete_var=complete_var, shell=shell 119 ) 120 completion_path.write_text(script_content) 121 return completion_path
145 script_content = get_completion_script( 146 prog_name=prog_name, complete_var=complete_var, shell=shell 147 ) 148 path_obj.write_text(script_content) 149 return path_obj
156 script_content = get_completion_script(
157 prog_name=prog_name, complete_var=complete_var, shell=shell
158 )
159 path_obj.write_text(f"{script_content}\n")
160 return path_obj
163 def install_powershell(*, prog_name: str, complete_var: str, shell: str) -> Path: 164 subprocess.run( 165 [ 166 shell, 167 "-Command", 168 "Set-ExecutionPolicy", 169 "Unrestricted", 170 "-Scope", 171 "CurrentUser", 172 ] 173 ) 174 result = subprocess.run(
173 ) 174 result = subprocess.run( 175 [shell, "-NoProfile", "-Command", "echo", "$profile"], 176 check=True, 177 stdout=subprocess.PIPE, 178 ) 179 if result.returncode != 0: # pragma: no cover
198 script_content = get_completion_script(
199 prog_name=prog_name, complete_var=complete_var, shell=shell
200 )
201 with path_obj.open(mode="a") as f:
202 f.write(f"{script_content}\n")
211 prog_name = prog_name or click.get_current_context().find_root().info_name 212 assert prog_name 213 if complete_var is None:
219 installed_path = install_bash( 220 prog_name=prog_name, complete_var=complete_var, shell=shell 221 ) 222 return shell, installed_path 223 elif shell == "zsh":
224 installed_path = install_zsh( 225 prog_name=prog_name, complete_var=complete_var, shell=shell 226 ) 227 return shell, installed_path 228 elif shell == "fish":
229 installed_path = install_fish(
230 prog_name=prog_name, complete_var=complete_var, shell=shell
231 )
232 return shell, installed_path
233 elif shell in {"powershell", "pwsh"}:
234 installed_path = install_powershell( 235 prog_name=prog_name, complete_var=complete_var, shell=shell 236 ) 237 return shell, installed_path 238 else:
246 command_obj = group.get_command(ctx, command)
247 assert command_obj
248 docs += f"* `{command_obj.name}`"
255 command_obj = group.get_command(ctx, command) 256 assert command_obj 257 use_prefix = ""
38 if isinstance(value, str): 39 shell, path = install(shell=value) 40 else:
50 prog_name = ctx.find_root().info_name
51 assert prog_name
52 complete_var = "_{}_COMPLETE".format(prog_name.replace("-", "_").upper())
59 script_content = get_completion_script( 60 prog_name=prog_name, complete_var=complete_var, shell=shell 61 ) 62 click.echo(script_content) 63 sys.exit(0)
81 else: 82 assert isinstance(c, str) 83 use_completion = CompletionItem(c)
4 import shutil 5 import subprocess 6 import sys
464 ) -> TyperGroup:
465 assert group_info.typer_instance, (
466 "A Typer instance is needed to generate a Click Group"
467 )
468 commands: Dict[str, click.Command] = {}
502 cls = solved_info.cls or TyperGroup
503 assert issubclass(cls, TyperGroup), f"{cls} should be a subclass of {TyperGroup}"
504 group = cls(
564 ) -> click.Command: 565 assert command_info.callback, "A command must have a callback function" 566 name = command_info.name or get_command_name(command_info.callback.__name__)
846 types.append(type_) 847 assert len(types) == 1, "Typer Currently doesn't support Union types" 848 main_type = types[0]
852 main_type = get_args(main_type)[0] 853 assert not get_origin(main_type), ( 854 "List types with complex sub-types are not currently supported" 855 ) 856 is_list = True
859 for type_ in get_args(main_type): 860 assert not get_origin(type_), ( 861 "Tuple types with complex sub-types are not currently supported" 862 ) 863 types.append(
1123 if _is_macos(): 1124 return subprocess.Popen( 1125 ["open", url], stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT 1126 ).wait() 1127
1123 if _is_macos(): 1124 return subprocess.Popen( 1125 ["open", url], stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT 1126 ).wait() 1127
1130 if has_xdg_open: 1131 return subprocess.Popen( 1132 ["xdg-open", url], stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT 1133 ).wait() 1134
1130 if has_xdg_open: 1131 return subprocess.Popen( 1132 ["xdg-open", url], stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT 1133 ).wait() 1134
410 deduped_pairs.remove(pair) 411 assert not deduped_pairs, deduped_pairs 412 parameters = tuple(new_parameters)
1814 if len(params) == 1 and not typing._is_param_expr(args[0]): 1815 assert i == 0 1816 args = (args,)
2019 if len(params) == 1 and not _is_param_expr(args[0]): 2020 assert i == 0 2021 args = (args,)
2499 def __typing_unpacked_tuple_args__(self): 2500 assert self.__origin__ is Unpack 2501 assert len(self.__args__) == 1
2500 assert self.__origin__ is Unpack 2501 assert len(self.__args__) == 1 2502 arg, = self.__args__
2510 def __typing_is_unpacked_typevartuple__(self): 2511 assert self.__origin__ is Unpack 2512 assert len(self.__args__) == 1
2511 assert self.__origin__ is Unpack 2512 assert len(self.__args__) == 1 2513 return isinstance(self.__args__[0], TypeVarTuple)
3309 def __new__(cls, typename, bases, ns): 3310 assert _NamedTuple in bases 3311 for base in bases:
3381 def _namedtuple_mro_entries(bases): 3382 assert NamedTuple in bases 3383 return (_NamedTuple,)
4033 return_value = {key:
4034 value if not isinstance(value, str) else eval(value, globals, locals)
4035 for key, value in ann.items() }
4115 code = forward_ref.__forward_code__ 4116 value = eval(code, globals, locals) 4117 forward_ref.__forward_evaluated__ = True
100 globals_: dict[str, Any] = {'Any': Any, 'typing': typing, 'typing_extensions': typing_extensions}
101 exec(func_code, globals_, locals_)
102 return locals_[function_name]
132 globals_: dict[str, Any] = {'Any': Any, 'typing': typing, 'typing_extensions': typing_extensions}
133 exec(func_code, globals_, locals_)
134 return locals_[function_name]
328 # key/value pair 329 assert len(vals) >= 2 330 if combine:
259 260 assert status is not None 261 return HTTP2Response(
312 self.sock.sendall(data) 313 except Exception: 314 pass 315
44 except BaseException as e: # Defensive: 45 assert not isinstance(e, KeyError) # KeyError shouldn't be possible. 46 key_lock.release()
313 if not buffer: 314 assert self._size == 0 315 return b""
850 """ 851 assert self._fp 852 c_int_max = 2**31 - 1
128 has_ipv6 = True 129 except Exception: 130 pass 131
304 if self.backoff_jitter != 0.0: 305 backoff_value += random.random() * self.backoff_jitter 306 return float(max(0, min(self.backoff_max, backoff_value)))
136 reading = "r" in mode or not writing 137 assert reading or writing 138 binary = "b" in mode
159 else: 160 assert writing 161 buffer = io.BufferedWriter(raw, buffering)
184 event = StreamEndEvent(token.start_mark, token.end_mark) 185 assert not self.states 186 assert not self.marks
185 assert not self.states 186 assert not self.marks 187 self.state = None